build.py (executable; forked from Unidata/TdsConfig)
#!/usr/bin/env python
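"""Build THREDDS configuration sets.

For each build directory, gather the files listed in its build.info manifest
(plus the directory's own contents), substitute the data directory into pqact
and xml files, and package everything into a config.zip together with a
fetch.sh bootstrap script that downloads and unpacks the archive.
"""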
from __future__ import print_function
from io import open
import os
import os.path
import shutil
build_file = 'build.info'
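# Each build directory holds a build.info manifest: one entry per line, naming
# a file or directory (relative to that build directory) whose contents should
# also be packaged. Illustrative example of a build.info (paths are made up):
#   ../shared
#   ../shared/threddsConfig.xml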
def get_config_includes(path):
    'Get files and directories to be included'
    # If we're given a directory, add filename
    if os.path.isdir(path):
        configfile = os.path.join(path, build_file)
    else:
        configfile = path

    # Return all lines in the file, if it exists, plus this path
    ret = []
    if os.path.exists(configfile):
        with open(configfile, 'r') as f:
            ret.extend(os.path.join(path, l.strip()) for l in f)
    ret.append(path)
    return ret
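# Files that this script itself generates (plus the manifest); get_files skips
# them so stale copies from a previous build are not re-packaged.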
exclude = {build_file, 'fetch.sh', 'config.zip'}
def get_files(path):
    '''
    Get relative and full paths for the files to be included.

    If we're given a file, return the base filename as well as full
    (normalized) path.

    If given a directory, walk it and return the full path to the file
    as well as its path relative to the directory passed in.
    '''
    if os.path.isfile(path):
        yield os.path.split(path)[-1], os.path.normpath(path)
    else:
        for root, dirs, files in os.walk(path):
            for name in files:
                if name not in exclude:
                    fullpath = os.path.join(root, name)
                    yield os.path.relpath(fullpath, path), fullpath
def write_script(scriptpath, configpath):
    'Create script for downloading this config file'
    lines = ['#!/bin/sh']

    # On Sun machines, set path as needed
    lines.append('unamestr=`uname`')
    lines.append('if [[ "$unamestr" == "SunOS" ]]; then')
    lines.append('\tPATH=/opt/csw/bin:/opt/jdk/bin:$PATH')
    lines.append('fi\n')

    # Enclosing script in '{}' forces it to be read into memory, dealing
    # with the problem of the script being modified (via jar xf) while
    # running.
    lines.append('{')

    # Fix any windows path separators
    configfile = os.path.split(configpath)[-1]
    configpath = configpath.replace('\\', '/')
    lines.append(('\twget'
                  ' --no-check-certificate'
                  ' https://artifacts.unidata.ucar.edu/repository/downloads-tds-config/%s'
                  ' -O %s')
                 % (configpath, configfile))
    lines.append('\tjar xf %s' % os.path.split(configpath)[-1])
    lines.append('\texit')
    lines.append('}\n')
    script = '\n'.join(lines)

    # Using binary mode to prevent writing \r on windows. Compare against the
    # UTF-8 encoded bytes so the "unchanged" check also holds under Python 3,
    # and skip rewriting an identical script.
    if os.path.exists(scriptpath):
        with open(scriptpath, 'rb') as f:
            if f.read() == script.encode('utf-8'):
                return

    with open(scriptpath, 'wb') as f:
        f.write(script.encode('utf-8'))
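# For reference, the fetch.sh generated above has roughly this shape (the zip
# path varies per configuration set; <dir> is a placeholder):
#
#   #!/bin/sh
#   unamestr=`uname`
#   if [[ "$unamestr" == "SunOS" ]]; then
#       PATH=/opt/csw/bin:/opt/jdk/bin:$PATH
#   fi
#
#   {
#       wget --no-check-certificate \
#           https://artifacts.unidata.ucar.edu/repository/downloads-tds-config/<dir>/config.zip \
#           -O config.zip
#       jar xf config.zip
#       exit
#   }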
if __name__ == '__main__':
    import argparse
    import subprocess
    import time
    import zipfile

    # Process directory on command line
    parser = argparse.ArgumentParser(
        description='Create THREDDS configuration sets.')
    parser.add_argument('dirs', type=str, nargs='*',
                        help='Directories to create THREDDS configuration set')
    parser.add_argument('--verbose', '-v', action='store_true',
                        help='Verbose output')

    # Used to replace the ${DATA_DIR} string in pqact and xml files. Encode to
    # bytes so the replacement works on raw file contents under both
    # Python 2 and Python 3.
    parser.add_argument('--datadir', '-d', type=lambda s: s.encode('utf-8'),
                        default=b'/data/ldm/pub')
    args = parser.parse_args()
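    # Example invocation (the directory name is illustrative):
    #   python build.py idd -v -d /data/ldm/pub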
    # If we're not given a directory, just look at all the dirs for a
    # config file.
    if not args.dirs:
        args.dirs = [d for d in os.listdir('.')
                     if os.path.isdir(d) and os.path.exists(os.path.join(d, build_file))]

    for builddir in args.dirs:
        print('Processing {}: '.format(builddir), end='')

        # Assemble a map of the path in the zipfile to the corresponding
        # path on our filesystem. All filepaths are relative to the
        # containing directory so that files in included directories
        # can be overridden by those later in the list
        files = dict()
        for d in get_config_includes(builddir):
            if args.verbose:
                print("Adding directory:", d)
            for fname, fullpath in get_files(d):
                if args.verbose:
                    print("Adding file:", fname, "->", fullpath)
                files[fname] = fullpath

        # Create config subdirectories for output
        if not os.path.exists(builddir):
            os.makedirs(builddir)

        # Write wget script
        outpath = os.path.join(builddir, 'config.zip')
        script = 'fetch.sh'
        scriptpath = os.path.join(builddir, script)
        write_script(scriptpath, outpath)

        # Include the script in the zipfile
        files[script] = scriptpath

        # Write these into the zipfile
        with zipfile.ZipFile(outpath, 'w', zipfile.ZIP_DEFLATED) as outf:
            for f, fullpath in sorted(files.items()):
                # Read the content from the file. We need to write the data
                # as bytes to the zipfile so we can control the file time
                # and eliminate spurious zip changes
                with open(fullpath, 'rb') as sourceFile:
                    data = sourceFile.read()

                # Look for files that could contain the ${DATA_DIR} macro and
                # replace it with the configured data directory
                if ('pqact' in f) or (f[-3:] == 'xml'):
                    data = data.replace(b'${DATA_DIR}', args.datadir)

                # Set the modification time based on the last time the file
                # was committed in git
                unix_time = subprocess.check_output(['git', 'log', '-1',
                                                     '--format=%ct', fullpath])
                if unix_time:
                    unix_time = int(unix_time)
                else:  # File hasn't been added to git yet
                    unix_time = int(os.stat('build.py').st_mtime)
                mtime = time.localtime(unix_time)[:6]

                zinfo = zipfile.ZipInfo(f, mtime)
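                # The Unix permission bits live in the upper 16 bits of
                # external_attr, so this records mode 0644 (rw-r--r--) for
                # every entry.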
                zinfo.external_attr = 0o644 << 16
                zinfo.compress_type = outf.compression
                outf.writestr(zinfo, data)

        print('wrote {}'.format(outpath))

        # Delete the fetch script
        os.remove(scriptpath)