import os
import re
import sys
from pathlib import Path

# `get_song_id`, `errors` and `retry_errors` are module-level names in the
# original source.


def postdownloading_callback(self, input_kwargs, *args):
    # The download URL and the response object arrive as the last two
    # positional arguments.
    r = args[-1]
    url = args[-2]
    if not r:
        print("Error: No response\n")
        return
    song_id = get_song_id(url)
    if r.error:
        print(f"Error: {r.error}", file=sys.stderr)
        if any(err in r.error for err in errors):
            print(f"Error: Beatmap not found: {url}\n", file=sys.stderr)
        if any(rerr in r.error for rerr in retry_errors):
            print(f"{url}\nAdded retry after queue end\n")
            # Drop the proxy that produced a retryable error.
            if input_kwargs and "proxies" in input_kwargs:
                proxy = re.search(r"//([^/]*)/",
                                  input_kwargs["proxies"]["http"]).group(1)
                if proxy in self._proxy.proxies:
                    self._proxy.proxies.remove(proxy)
                if len(self._proxy.proxies) < 2:
                    print("No valid proxies, exiting\n")
                    del self._proxy
            return self.retry_download(url)
        return
    if r.url == 'https://osu.ppy.sh/p/error':
        print("Error: Osu site internal error", file=sys.stderr)
        Path(r.out_file).unlink()
        return
    try:
        old_filename = Path(r.out_file).resolve(strict=True)
        # Pull the quoted filename out of the response headers
        # (Content-Disposition) and strip characters that are unsafe
        # in file names.
        name = r.info._headers[6][1].split('"')[1::2][0]
        name = re.sub(r'[^\w_.)( -]', '', name)
        name = old_filename.parent.joinpath(name)
        old_filename.replace(name)
    except Exception as e:
        print(f"Error: Failed to rename beatmap: {url}\n{e}",
              file=sys.stderr)
    else:
        if self.auto_start:
            os.startfile(name)
        print(f"Successfully downloaded: {name.stem}")
        del old_filename, name
    del r, song_id
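
# A minimal harness sketch for the callback above, assuming a downloader
# instance and a response-like object with .error/.url/.out_file/.info
# attributes; the stubs below (errors, retry_errors, get_song_id) stand in
# for the module-level names in the original source.
from types import SimpleNamespace

errors = ["404"]                    # hypothetical fatal error markers
retry_errors = ["timed out"]        # hypothetical retryable markers
get_song_id = lambda url: url.rstrip("/").rsplit("/", 1)[-1]

fake_response = SimpleNamespace(error="request timed out",
                                url="", out_file="", info=None)
# downloader.postdownloading_callback(
#     {}, "https://osu.ppy.sh/beatmapsets/1", fake_response)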

import re
from collections import defaultdict
from pathlib import Path

# `cfmt` is a module-level custom formatter in the original source; its
# .format(template, mapping) fills "{field}" placeholders from the mapping.


def clean_template(template, removals=None, cfmt=cfmt):
    """Render `template` with empty field values and strip the leftover
    optional markers, yielding a clean POSIX-style path."""
    if removals is None:
        removals = ['temp', 'dx', 'dy']
    # Fill every placeholder with an empty string, then drop the now-empty
    # "[key=]" markers.
    filled = cfmt.format(template, defaultdict(str))
    for removal in removals:
        filled = filled.replace('[{}=]'.format(removal), '')
    # Collapse the underscore runs left behind by removed fields.
    cleaned = re.sub(r"__+", "_", filled)
    cleaned = cleaned.replace('_.', '.')
    for char in '[]()':
        cleaned = cleaned.replace(char, '')
    path = Path(cleaned).as_posix()
    return path.replace('/_', '/')
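
# Usage sketch for clean_template. The stand-in formatter below is an
# assumption for illustration only; the real `cfmt` comes from the host
# module.
from string import Formatter

class _MapFormatter(Formatter):
    def format(self, template, mapping):
        return self.vformat(template, (), mapping)

print(clean_template('data/[temp=]run_{station}(test).h5',
                     cfmt=_MapFormatter()))
# -> 'data/run_test.h5': empty fields and their markers are stripped.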

import shutil
from pathlib import Path


def update(generated_folder, destination_folder, global_conf, local_conf):
    """Update data from the generated folder to the final folder."""
    wrapper_files_or_dirs = merge_options(
        global_conf, local_conf, "wrapper_filesOrDirs") or []
    delete_files_or_dirs = merge_options(
        global_conf, local_conf, "delete_filesOrDirs") or []
    generated_relative_base_directory = (
        local_conf.get('generated_relative_base_directory')
        or global_conf.get('generated_relative_base_directory'))

    client_generated_path = Path(generated_folder)
    if generated_relative_base_directory:
        client_generated_path = next(
            client_generated_path.glob(generated_relative_base_directory))

    # Move wrapper files from the destination into the generated tree so
    # they survive the folder swap below.
    for wrapper_file_or_dir in wrapper_files_or_dirs:
        for file_path in Path(destination_folder).glob(wrapper_file_or_dir):
            relative_file_path = file_path.relative_to(destination_folder)
            file_path_dest = client_generated_path.joinpath(str(relative_file_path))
            file_path.replace(file_path_dest)

    # Delete unwanted generated files or directories.
    for delete_file_or_dir in delete_files_or_dirs:
        for file_path in client_generated_path.glob(delete_file_or_dir):
            if file_path.is_file():
                file_path.unlink()
            else:
                shutil.rmtree(str(file_path))

    # Replace the destination folder with the generated one.
    shutil.rmtree(destination_folder)
    client_generated_path.replace(destination_folder)
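
# Hypothetical call sketch for update(). merge_options is assumed to be a
# module-level helper that merges a key across global and local config;
# the folder names and values below are placeholders shaped like the keys
# read above.
global_conf = {"wrapper_filesOrDirs": ["setup.py", "README.rst"]}
local_conf = {
    "delete_filesOrDirs": ["credentials.py"],
    "generated_relative_base_directory": "*/generated",
}
# update("Generated/example-sdk", "example-sdk", global_conf, local_conf)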

import pathlib


def validate_io_streams(input_file: pathlib.Path, output_file: pathlib.Path) -> bool:
    """
    Ensure I/O paths are valid and clean for the program.
    :param input_file: input file (JSON)
    :param output_file: output file (SQLite)
    :return: good-to-continue status
    """
    if not input_file.is_file():
        LOGGER.fatal("Input file {} does not exist.".format(input_file))
        return False
    output_file.parent.mkdir(exist_ok=True)
    if output_file.is_file():
        LOGGER.warning("Output file {} exists already, moving it.".format(output_file))
        # Path.replace also overwrites any previous ".old" backup.
        output_file.replace(output_file.parent.joinpath(output_file.name + ".old"))
    return True
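
# Usage sketch, assuming LOGGER is the module-level logging.Logger the
# function writes to; the file names are placeholders.
import logging

LOGGER = logging.getLogger(__name__)

if validate_io_streams(pathlib.Path("cards.json"),
                       pathlib.Path("build/cards.sqlite")):
    print("Paths validated; safe to build the database")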

import tarfile
from pathlib import Path


def _extract_to_folder(db_archive_path, db_folder):
    with tarfile.open(db_archive_path) as tar_archive:
        for member in tar_archive.getmembers():
            # Skip directories so that only regular file objects are
            # extracted.
            if not member.isreg():
                continue
            tar_archive.extract(member, db_folder)
            # The files are extracted to a subfolder (with a date in the
            # name); move them up into the main folder above it.
            targetname = Path(member.name).name
            if targetname != member.name:
                curr_file = Path(db_folder).joinpath(member.name)
                extr_folder = curr_file.parent
                curr_file.replace(Path(db_folder).joinpath(targetname))
                # If the dated subfolder is now empty, remove it.
                if not list(extr_folder.glob("*")):
                    extr_folder.rmdir()
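
# Call sketch with placeholder paths: an archive laid out as
# "Database_20200101/data.mmdb" ends up flattened to "db/data.mmdb".
# _extract_to_folder("downloads/database.tar.gz", "db")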

from pathlib import Path


def _transform_mobile_v1_2_to_pprf_2_0(rec_dir: str):
    _generate_pprf_2_0_info_file(rec_dir)

    # Rename the info.csv file to info.mobile.csv.
    info_csv = Path(rec_dir) / "info.csv"
    new_path = info_csv.with_name("info.mobile.csv")
    info_csv.replace(new_path)

    recording = PupilRecording(rec_dir)

    # Patch world.intrinsics.
    # NOTE: could still be worldless at this point.
    update_utils._try_patch_world_instrinsics_file(
        rec_dir, recording.files().mobile().world().videos()
    )

    _rename_mobile_files(recording)
    _rewrite_timestamps(recording)
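
# Call sketch: the transform runs in place on a recording directory; the
# path is a placeholder and the helpers above come from the host package.
# _transform_mobile_v1_2_to_pprf_2_0("/recordings/2019_10_01/001")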

import re
from pathlib import Path


def _fmt(path):
    text = Path(path).read_text().strip()
    # Remove the repositories tag, as it's a detail that's always the same.
    text = re.sub(r" +<repositories>.*?</repositories>.*?\n <o",
                  " <o",
                  text,
                  flags=re.S | re.M)
    text = text.replace("\n\n", "\n")
    return text
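
# Usage sketch: normalize two pom.xml files before comparing them so the
# repositories block and doubled blank lines don't produce false diffs
# (paths are placeholders).
# assert _fmt("expected/pom.xml") == _fmt("generated/pom.xml")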

from pathlib import Path


def move_file(self, source, destination, overwrite=False):
    """Move a file from a source path to a destination path,
    optionally overwriting the destination.

    :param source: source file path for moving
    :param destination: path to move to
    :param overwrite: replace the destination file if it already exists
    """
    src = Path(source)
    dst = Path(destination)
    if not src.is_file():
        raise FileNotFoundError(f"Source {src} is not a file")
    if dst.exists() and not overwrite:
        raise FileExistsError(f"Destination {dst} already exists")
    src.replace(dst)
    self.logger.info("Moved file: %s -> %s", src, dst)
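
# Minimal host-class sketch for move_file, which only relies on
# self.logger; the FileManager name here is hypothetical.
import logging

class FileManager:
    def __init__(self):
        self.logger = logging.getLogger("filemanager")

    move_file = move_file  # bind the function above as a method

# FileManager().move_file("draft.txt", "final.txt", overwrite=True)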

import os
from pathlib import Path

# `config`, `logger` and `tex_compilation_command` are module-level names
# in the host package.


def compile_tex(tex_file, tex_compiler, output_format):
    """Compiles a tex_file into a .dvi, a .xdv, or a .pdf.

    Parameters
    ----------
    tex_file : :class:`str`
        File name of the TeX file to be typeset.
    tex_compiler : :class:`str`
        String containing the compiler to be used, e.g. ``pdflatex`` or ``lualatex``
    output_format : :class:`str`
        String containing the output format generated by the compiler, e.g. ``.dvi`` or ``.pdf``

    Returns
    -------
    :class:`str`
        Path to the generated output file in the desired format (DVI, XDV or PDF).
    """
    result = tex_file.replace(".tex", output_format)
    result = Path(result).as_posix()
    tex_file = Path(tex_file).as_posix()
    tex_dir = Path(config.get_dir("tex_dir")).as_posix()
    if not os.path.exists(result):
        command = tex_compilation_command(tex_compiler, output_format,
                                          tex_file, tex_dir)
        exit_code = os.system(command)
        if exit_code != 0:
            log_file = tex_file.replace(".tex", ".log")
            if not Path(log_file).exists():
                raise RuntimeError(
                    f"{tex_compiler} failed but did not produce a log file. "
                    "Check your LaTeX installation.")
            with open(log_file, "r") as f:
                log = f.readlines()
            log_error_pos = [
                ind for (ind, line) in enumerate(log)
                if line.startswith("!")
            ]
            if log_error_pos:
                logger.error(
                    f"LaTeX compilation error! {tex_compiler} reports:")
                for lineno in log_error_pos:
                    # Search for a line starting with "l." in the next
                    # few lines past the error; otherwise just print
                    # some lines.
                    printed_lines = 1
                    for _ in range(10):
                        if log[lineno + printed_lines].startswith("l."):
                            break
                        printed_lines += 1
                    for line in log[lineno:lineno + printed_lines + 1]:
                        logger.error(line)
            raise ValueError(f"{tex_compiler} error converting to"
                             f" {output_format[1:]}. See log output above or"
                             f" the log file: {log_file}")
    return result
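
# Call sketch (`config`, `logger` and `tex_compilation_command` must exist
# as globals in the host module; the file name is a placeholder):
# compile_tex("media/Tex/e4f7d9a2.tex", "pdflatex", ".pdf")
# -> "media/Tex/e4f7d9a2.pdf"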

import os
import xml.etree.ElementTree as ET
from copy import copy
from pathlib import Path

import pandas as pd


def _make_pt_vrt(self, layer_name, out_dir):
    """
    Make a VRT file for station point ratios in the summary CSV for a
    specific layer or field name and save it to out_dir. Used for
    gdal_grid interpolation commands of scatter point data.
    """
    if not Path(out_dir).is_dir():
        os.makedirs(out_dir)
    point_data = Path(self.summary_csv_path).name.replace(
        '.csv', '_tmp.csv')
    # Make a tmp point-data CSV for the given layer, dropping missing
    # values.
    df = pd.read_csv(self.summary_csv_path)
    df = df[['STATION_LAT', 'STATION_LON', layer_name]]
    df = df[df[layer_name] != -999]
    tmp_out_path = str(Path(self.summary_csv_path).parent / point_data)
    df.to_csv(tmp_out_path, index=False)
    # If out_dir is nested, adjust the summary CSV path by prepending the
    # right number of parent dirs ("../").
    tmp = copy(Path(out_dir))
    n_parent_dirs = 0
    while len(Path(tmp).parents) > 0:
        if tmp.name == self.summary_csv_path.parent.name:
            break
        tmp = Path(tmp).parent
        n_parent_dirs += 1
    path_to_data = str(
        Path(
            '..{}'.format(os.sep) * n_parent_dirs
        ).joinpath(point_data)
    )
    out_file = '{}.vrt'.format(layer_name)  # keep it simple: just the layer name
    # Build the VRT XML for reading CSV point data.
    root = ET.Element('OGRVRTDataSource')
    OGRVRTLayer = ET.SubElement(root, 'OGRVRTLayer',
                                name=point_data.replace('.csv', ''))
    # Set the data source, SRS (WGS84), and point geometry columns.
    ET.SubElement(OGRVRTLayer, 'SrcDataSource').text = path_to_data
    ET.SubElement(OGRVRTLayer, 'LayerSRS').text = 'epsg:4326'
    ET.SubElement(OGRVRTLayer, 'GeometryType').text = 'wkbPoint'
    ET.SubElement(OGRVRTLayer, 'GeometryField', encoding='PointFromColumns',
                  x='STATION_LON', y='STATION_LAT', z=layer_name)
    # Indent the XML via the module-level _prettify helper, then save it
    # to out_dir.
    out_xml_str = _prettify(root)
    out_path = os.path.join(out_dir, out_file)
    with open(out_path, 'w') as outf:
        outf.write(out_xml_str)
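
# For reference, the VRT written above has this shape (shown for a
# hypothetical layer "annual_mean" and a summary CSV named "summary.csv"):
# <OGRVRTDataSource>
#   <OGRVRTLayer name="summary_tmp">
#     <SrcDataSource>../summary_tmp.csv</SrcDataSource>
#     <LayerSRS>epsg:4326</LayerSRS>
#     <GeometryType>wkbPoint</GeometryType>
#     <GeometryField encoding="PointFromColumns"
#                    x="STATION_LON" y="STATION_LAT" z="annual_mean"/>
#   </OGRVRTLayer>
# </OGRVRTDataSource>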