Columns: rem (string, lengths 0–322k) · add (string, lengths 0–2.05M) · context (string, lengths 8–228k)
expected_status_tree)
expected_status_tree, None, None, None, None, 1)
def update_props(): "receive properties via update" # Bootstrap sbox = sandbox(update_props) wc_dir = os.path.join (svntest.main.general_wc_dir, sbox) if svntest.actions.make_repo_and_wc(sbox): return 1 # Make a backup copy of the working copy wc_backup = wc_dir + 'backup' svntest.actions.duplicate_dir(wc_dir, wc_backup) # Add a property to a file and a directory mu_path = os.path.join(wc_dir, 'A', 'mu') H_path = os.path.join(wc_dir, 'A', 'D', 'H') svntest.main.run_svn('propset', 'blue', 'azul', mu_path) svntest.main.run_svn('propset', 'red', 'rojo', H_path) # Create expected output tree. output_list = [ [mu_path, None, {}, {'verb' : 'Changing'}], [ H_path, None, {}, {'verb' : 'Changing'}] ] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Created expected status tree. status_list = svntest.actions.get_virginal_status_list(wc_dir, 1) for item in status_list: item[3]['repos_rev'] = '2' # post-commit status if (item[0] == mu_path) or (item[0] == H_path): item[3]['wc_rev'] = '2' item[3]['status'] = '__' expected_status_tree = svntest.tree.build_generic_tree(status_list) # Commit the one file. if svntest.actions.run_and_verify_commit (wc_dir, expected_output_tree, expected_status_tree, None, None, None, None, None, wc_dir): return 1 # Create expected output tree for an update of the wc_backup. output_list = [ [os.path.join(wc_backup, mu_path), None, {}, {'status' : '_U'}], [os.path.join(wc_backup, H_path), None, {}, {'status' : '_U'}] ] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Create expected disk tree for the update. my_greek_tree = svntest.main.copy_greek_tree() my_greek_tree[2][2]['blue'] = 'azul' # A/mu my_greek_tree[16][2]['red'] = 'rojo' # A/D/H expected_disk_tree = svntest.tree.build_generic_tree(my_greek_tree) # Create expected status tree for the update. status_list = svntest.actions.get_virginal_status_list(wc_backup, '2') expected_status_tree = svntest.tree.build_generic_tree(status_list) # Do the update and check the results in three ways. return svntest.actions.run_and_verify_update(wc_backup, expected_output_tree, expected_disk_tree, expected_status_tree)
if target_ob.install == 'test' and bldtype == 'exe' \ and parser.get(target, 'testing') != 'skip': test_progs.append(tpath)
if target_ob.install == 'test' and bldtype == 'exe': test_deps.append(tpath) if parser.get(target, 'testing') != 'skip': test_progs.append(tpath)
def main(fname, oname=None, skip_depends=0): parser = ConfigParser.ConfigParser(_cfg_defaults) parser.read(fname) if oname is None: oname = os.path.splitext(os.path.basename(fname))[0] + '-outputs.mk' ofile = open(oname, 'w') ofile.write('# DO NOT EDIT -- AUTOMATICALLY GENERATED\n\n') errors = 0 targets = { } groups = { } # group name -> targets install = { } # install area name -> targets test_progs = [ ] file_deps = [ ] target_dirs = { } target_names = _filter_targets(parser.sections()) # PASS 1: collect the targets and some basic info for target in target_names: try: target_ob = Target(target, parser.get(target, 'path'), parser.get(target, 'install'), parser.get(target, 'type')) except GenMakeError, e: print e errors = 1 continue targets[target] = target_ob group = parser.get(target, 'group') if groups.has_key(group): groups[group].append(target_ob.output) else: groups[group] = [ target_ob.output ] itype = target_ob.install if install.has_key(itype): install[itype].append(target_ob) else: install[itype] = [ target_ob ] target_dirs[target_ob.path] = None if errors: sys.exit(1) # PASS 2: generate the outputs for target in target_names: target_ob = targets[target] path = target_ob.path bldtype = target_ob.type objext = target_ob.objext tpath = target_ob.output tfile = os.path.basename(tpath) if target_ob.install == 'test' and bldtype == 'exe' \ and parser.get(target, 'testing') != 'skip': test_progs.append(tpath) pats = parser.get(target, 'sources') if not pats: pats = _default_sources[bldtype] sources, s_errors = _collect_paths(pats, path) errors = errors or s_errors objects = [ ] for src in sources: if src[-2:] == '.c': objname = src[:-2] + objext objects.append(objname) file_deps.append((src, objname)) else: print 'ERROR: unknown file extension on', src errors = 1 retreat = _retreat_dots(path) libs = [ ] deps = [ ] for lib in string.split(parser.get(target, 'libs')): if lib in target_names: tlib = targets[lib] target_ob.deps.append(tlib) deps.append(tlib.output) dep_path = tlib.path if bldtype == 'lib': # we need to hack around a libtool problem: it cannot record a # dependency of one shared lib on another shared lib. ### fix this by upgrading to the new libtool 1.4 release... 
# strip "lib" from the front so we have -lsvn_foo if lib[:3] == 'lib': lib = lib[3:] libs.append('-L%s -l%s' % (retreat + os.path.join(dep_path, '.libs'), lib)) else: # linking executables can refer to .la files libs.append(retreat + os.path.join(dep_path, lib + '.la')) else: # something we don't know, so just include it directly libs.append(lib) targ_varname = string.replace(target, '-', '_') ldflags = parser.get(target, 'link-flags') objnames = string.join(map(os.path.basename, objects)) ofile.write('%s_DEPS = %s %s\n' '%s_OBJECTS = %s\n' '%s: $(%s_DEPS)\n' '\tcd %s && $(LINK) -o %s %s $(%s_OBJECTS) %s $(LIBS)\n\n' % (targ_varname, string.join(objects), string.join(deps), targ_varname, objnames, tpath, targ_varname, path, tfile, ldflags, targ_varname, string.join(libs))) custom = parser.get(target, 'custom') if custom == 'apache-mod': # special build, needing Apache includes ofile.write('# build these special -- use APACHE_INCLUDES\n') for src in sources: if src[-2:] == '.c': ofile.write('%s%s: %s\n\t$(COMPILE_APACHE_MOD)\n' % (src[:-2], objext, src)) ofile.write('\n') for g_name, g_targets in groups.items(): ofile.write('%s: %s\n\n' % (g_name, string.join(g_targets))) ofile.write('CLEAN_DIRS = %s\n' % string.join(target_dirs.keys())) cfiles = [ ] for target in targets.values(): # .la files are handled by the standard 'clean' rule; clean all the # other targets if target.output[-3:] != '.la': cfiles.append(target.output) ofile.write('CLEAN_FILES = %s\n\n' % string.join(cfiles)) for area, inst_targets in install.items(): files = [ ] for t in inst_targets: files.append(t.output) if area == 'apache-mod': ofile.write('install-mods-shared: %s\n' % (string.join(files),)) la_tweaked = { } for file in files: base, ext = os.path.splitext(os.path.basename(file)) name = string.replace(base, 'libmod_', '') ofile.write('\t$(INSTALL_MOD_SHARED) -n %s %s\n' % (name, file)) if ext == '.la': la_tweaked[file + '-a'] = None for t in inst_targets: for dep in t.deps: bt = dep.output if bt[-3:] == '.la': la_tweaked[bt + '-a'] = None la_tweaked = la_tweaked.keys() s_files, s_errors = _collect_paths(parser.get('static-apache', 'paths')) errors = errors or s_errors ofile.write('\ninstall-mods-static: %s\n' '\t$(mkinstalldirs) %s\n' % (string.join(la_tweaked + s_files), os.path.join('$(APACHE_TARGET)', '.libs'))) for file in la_tweaked: dirname, fname = os.path.split(file) base = os.path.splitext(fname)[0] ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' '\t$(INSTALL_MOD_STATIC) %s %s\n' % (os.path.join(dirname, '.libs', base + '.a'), os.path.join('$(APACHE_TARGET)', '.libs', base + '.a'), file, os.path.join('$(APACHE_TARGET)', base + '.la'))) for file in s_files: ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' % (file, os.path.join('$(APACHE_TARGET)', os.path.basename(file)))) ofile.write('\n') elif area != 'test': ofile.write('install-%s: %s\n' '\t$(mkinstalldirs) $(%sdir)\n' % (area, string.join(files), area)) for file in files: ofile.write('\t$(INSTALL_%s) %s %s\n' % (string.upper(area), file, os.path.join('$(%sdir)' % area, os.path.basename(file)))) ofile.write('\n') includes, i_errors = _collect_paths(parser.get('includes', 'paths')) errors = errors or i_errors ofile.write('install-include: %s\n' '\t$(mkinstalldirs) $(includedir)\n' % (string.join(includes),)) for file in includes: ofile.write('\t$(INSTALL_INCLUDE) %s %s\n' % (file, os.path.join('$(includedir)', os.path.basename(file)))) ofile.write('\n# handy shortcut targets\n') for name, target in targets.items(): ofile.write('%s: %s\n' % (name, 
target.output)) ofile.write('\n') scripts, s_errors = _collect_paths(parser.get('test-scripts', 'paths')) errors = errors or s_errors ofile.write('TEST_PROGRAMS = %s\n\n' % string.join(test_progs + scripts)) if not skip_depends: # # Find all the available headers and what they depend upon. the # include_deps is a dictionary mapping a short header name to a tuple # of the full path to the header and a dictionary of dependent header # names (short) mapping to None. # # Example: # { 'short.h' : ('/path/to/short.h', # { 'other.h' : None, 'foo.h' : None }) } # # Note that this structure does not allow for similarly named headers # in per-project directories. SVN doesn't have this at this time, so # this structure works quite fine. (the alternative would be to use # the full pathname for the key, but that is actually a bit harder to # work with since we only see short names when scanning, and keeping # a second variable around for mapping the short to long names is more # than I cared to do right now) # include_deps = _create_include_deps(includes) for d in target_dirs.keys(): hdrs = glob.glob(os.path.join(d, '*.h')) if hdrs: more_deps = _create_include_deps(hdrs, include_deps) include_deps.update(more_deps) for src, objname in file_deps: hdrs = [ ] for short in _find_includes(src, include_deps): hdrs.append(include_deps[short][0]) ofile.write('%s: %s %s\n' % (objname, src, string.join(hdrs))) if errors: sys.exit(1)
diff_pure_repository_update_a_file ]
diff_pure_repository_update_a_file, diff_only_property_change ]
def diff_pure_repository_update_a_file(sbox): "pure repository diff update a file" if sbox.build(): return 1 wc_dir = sbox.wc_dir was_cwd = os.getcwd() os.chdir(wc_dir) # rev 2 update_a_file() svntest.main.run_svn(None, 'ci', '-m', '"log msg"') # rev 3 add_a_file_in_a_subdir() svntest.main.run_svn(None, 'ci', '-m', '"log msg"') # rev 4 add_a_file() svntest.main.run_svn(None, 'ci', '-m', '"log msg"') # rev 5 update_added_file() svntest.main.run_svn(None, 'ci', '-m', '"log msg"') svntest.main.run_svn(None, 'up', '-r2') os.chdir(was_cwd) url = svntest.main.test_area_url + '/' + svntest.main.current_repo_dir diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r1:2', url) if check_update_a_file(diff_output): return 1 os.chdir(wc_dir) diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r1:2') os.chdir(was_cwd) if check_update_a_file(diff_output): return 1 diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r2:3', url) if check_add_a_file_in_a_subdir(diff_output): return 1 os.chdir(wc_dir) diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r2:3') os.chdir(was_cwd) if check_add_a_file_in_a_subdir(diff_output): return 1 diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r4:5', url) if check_update_added_file(diff_output): return 1 os.chdir(wc_dir) diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r4:5') os.chdir(was_cwd) if check_update_added_file(diff_output): return 1 os.chdir(wc_dir) diff_output, err_output = svntest.main.run_svn(None, 'diff', '-rh') os.chdir(was_cwd) if check_add_a_file_in_a_subdir_reverse(diff_output): return 1 return 0
update_binary_file
update_binary_file, update_binary_file_2
def update_binary_file_2(): "update to an old revision of a binary files" sbox = sandbox(update_binary_file_2) wc_dir = os.path.join (svntest.main.general_wc_dir, sbox) if svntest.actions.make_repo_and_wc(sbox): return 1 # Suck up contents of a test .png file. fp = open("theta.png") theta_contents = fp.read() fp.close() # 102400 is svn_txdelta_window_size. We're going to make sure we # have at least 102401 bytes of data in our second binary file (for # no reason other than we have had problems in the past with getting # svndiff data out of the repository for files > 102400 bytes). # How? Well, we'll just keep doubling the binary contents of the # original theta.png until we're big enough. zeta_contents = theta_contents while(len(zeta_contents) < 102401): zeta_contents = zeta_contents + zeta_contents # Write our two files' contents out to disk, in A/theta and A/zeta. theta_path = os.path.join(wc_dir, 'A', 'theta') fp = open(theta_path, 'w') fp.write(theta_contents) fp.close() zeta_path = os.path.join(wc_dir, 'A', 'zeta') fp = open(zeta_path, 'w') fp.write(zeta_contents) fp.close() # Now, `svn add' those two files. svntest.main.run_svn(None, 'add', theta_path, zeta_path) # Created expected output tree for 'svn ci' output_list = [ [theta_path, None, {}, {'verb' : 'Adding' }], [zeta_path, None, {}, {'verb' : 'Adding' }] ] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Create expected status tree status_list = svntest.actions.get_virginal_status_list(wc_dir, '2') for item in status_list: item[3]['wc_rev'] = '1' status_list.append([theta_path, None, {}, {'status' : '__', 'locked' : ' ', 'wc_rev' : '2', 'repos_rev' : '2'}]) status_list.append([zeta_path, None, {}, {'status' : '__', 'locked' : ' ', 'wc_rev' : '2', 'repos_rev' : '2'}]) expected_status_tree = svntest.tree.build_generic_tree(status_list) # Commit the new binary filea, creating revision 2. if svntest.actions.run_and_verify_commit (wc_dir, expected_output_tree, expected_status_tree, None, None, None, None, None, wc_dir): return 1 # Make some mods to the binary files. svntest.main.file_append (theta_path, "foobar") new_theta_contents = theta_contents + "foobar" svntest.main.file_append (zeta_path, "foobar") new_zeta_contents = zeta_contents + "foobar" # Created expected output tree for 'svn ci' output_list = [ [theta_path, None, {}, {'verb' : 'Sending' }], [zeta_path, None, {}, {'verb' : 'Sending' }] ] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Create expected status tree status_list = svntest.actions.get_virginal_status_list(wc_dir, '3') for item in status_list: item[3]['wc_rev'] = '1' status_list.append([theta_path, None, {}, {'status' : '__', 'locked' : ' ', 'wc_rev' : '3', 'repos_rev' : '3'}]) status_list.append([zeta_path, None, {}, {'status' : '__', 'locked' : ' ', 'wc_rev' : '3', 'repos_rev' : '3'}]) expected_status_tree = svntest.tree.build_generic_tree(status_list) # Commit original working copy again, creating revision 3. if svntest.actions.run_and_verify_commit (wc_dir, expected_output_tree, expected_status_tree, None, None, None, None, None, wc_dir): return 1 # Create expected output tree for an update of wc_backup. output_list = [ [theta_path, None, {}, {'status' : 'U '}], [zeta_path, None, {}, {'status' : 'U '}] ] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Create expected disk tree for the update -- # look! binary contents, and a binary property! 
my_greek_tree = svntest.main.copy_greek_tree() my_greek_tree.append(['A/theta', theta_contents, {'svn:mime-type' : 'application/octet-stream'}, {}]) my_greek_tree.append(['A/zeta', zeta_contents, {'svn:mime-type' : 'application/octet-stream'}, {}]) expected_disk_tree = svntest.tree.build_generic_tree(my_greek_tree) # Create expected status tree for the update. status_list = svntest.actions.get_virginal_status_list(wc_dir, '3') for item in status_list: item[3]['wc_rev'] = '2' status_list.append([theta_path, None, {}, {'status' : '__', 'locked' : ' ', 'wc_rev' : '2', 'repos_rev' : '3'}]) status_list.append([zeta_path, None, {}, {'status' : '__', 'locked' : ' ', 'wc_rev' : '2', 'repos_rev' : '3'}]) expected_status_tree = svntest.tree.build_generic_tree(status_list) # Do an update from revision 2 and make sure that our binary file # gets reverted to its original contents. return svntest.actions.run_and_verify_update(wc_dir, expected_output_tree, expected_disk_tree, expected_status_tree, None, None, None, None, 1, '-r', '2')
return check_update_a_file(diff_output)
if check_update_a_file(diff_output): return 1 os.chdir(wc_dir) diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r1:2') os.chdir(was_cwd) if check_update_a_file(diff_output): return 1 diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r2:3', url) if check_add_a_file_in_a_subdir(diff_output): return 1 os.chdir(wc_dir) diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r2:3') os.chdir(was_cwd) if check_add_a_file_in_a_subdir(diff_output): return 1 diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r4:5', url) if check_update_added_file(diff_output): return 1 os.chdir(wc_dir) diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r4:5') os.chdir(was_cwd) if check_update_added_file(diff_output): return 1 os.chdir(wc_dir) diff_output, err_output = svntest.main.run_svn(None, 'diff', '-rh') os.chdir(was_cwd) if check_add_a_file_in_a_subdir_reverse(diff_output): return 1 return 0
def diff_pure_repository_update_a_file(sbox): "pure repository diff update a file" if sbox.build(): return 1 wc_dir = sbox.wc_dir was_cwd = os.getcwd() os.chdir(wc_dir) update_a_file() svntest.main.run_svn(None, 'ci') os.chdir(was_cwd) url = svntest.main.test_area_url + '/' + svntest.main.current_repo_dir diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r1:2', url) return check_update_a_file(diff_output)
rm = re.compile ('^(..)(.)(.+)(\d+)\s+(.+)')
rm = re.compile ('^(..)(.)([^0-9]+)(\d+|-)\s+(.+)')
def build_tree_from_status(lines): "Return a tree derived by parsing the output LINES from 'st'." root = SVNTreeNode(root_node_name) rm = re.compile ('^.+\:.+(\d+)') lastline = string.strip(lines.pop()) match = rm.search(lastline) if match and match.groups(): repos_rev = match.group(1) else: repos_rev = '?' rm = re.compile ('^(..)(.)(.+)(\d+)\s+(.+)') for line in lines: match = rm.search(line) if match and match.groups(): new_branch = create_from_path(match.group(5), None, {}, {'status' : match.group(1), 'locked' : match.group(2), 'wc_rev' : match.group(4), 'repos_rev' : repos_rev}) root.add_child(new_branch) return root
new_branch = create_from_path(match.group(5), None, {}, {'status' : match.group(1), 'locked' : match.group(2), 'wc_rev' : match.group(4), 'repos_rev' : repos_rev})
if match.group(4) != '-': new_branch = create_from_path(match.group(5), None, {}, {'status' : match.group(1), 'locked' : match.group(2), 'wc_rev' : match.group(4), 'repos_rev' : repos_rev})
def build_tree_from_status(lines): "Return a tree derived by parsing the output LINES from 'st'." root = SVNTreeNode(root_node_name) rm = re.compile ('^.+\:.+(\d+)') lastline = string.strip(lines.pop()) match = rm.search(lastline) if match and match.groups(): repos_rev = match.group(1) else: repos_rev = '?' rm = re.compile ('^(..)(.)(.+)(\d+)\s+(.+)') for line in lines: match = rm.search(line) if match and match.groups(): new_branch = create_from_path(match.group(5), None, {}, {'status' : match.group(1), 'locked' : match.group(2), 'wc_rev' : match.group(4), 'repos_rev' : repos_rev}) root.add_child(new_branch) return root
output_list = [ [os.path.join(wc_backup, mu_path),
output_list = [ [mu_path,
def update_props(): "receive properties via update" # Bootstrap sbox = sandbox(update_props) wc_dir = os.path.join (svntest.main.general_wc_dir, sbox) if svntest.actions.make_repo_and_wc(sbox): return 1 # Make a backup copy of the working copy wc_backup = wc_dir + 'backup' svntest.actions.duplicate_dir(wc_dir, wc_backup) # Add a property to a file and a directory mu_path = os.path.join(wc_dir, 'A', 'mu') H_path = os.path.join(wc_dir, 'A', 'D', 'H') svntest.main.run_svn('propset', 'blue', 'azul', mu_path) svntest.main.run_svn('propset', 'red', 'rojo', H_path) # Create expected output tree. output_list = [ [mu_path, None, {}, {'verb' : 'Changing'}], [ H_path, None, {}, {'verb' : 'Changing'}] ] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Created expected status tree. status_list = svntest.actions.get_virginal_status_list(wc_dir, '1') for item in status_list: item[3]['repos_rev'] = '2' # post-commit status if (item[0] == mu_path) or (item[0] == H_path): item[3]['wc_rev'] = '2' item[3]['status'] = '__' expected_status_tree = svntest.tree.build_generic_tree(status_list) # Commit the one file. if svntest.actions.run_and_verify_commit (wc_dir, expected_output_tree, expected_status_tree, None, None, None, None, None, wc_dir): return 1 # Create expected output tree for an update of the wc_backup. output_list = [ [os.path.join(wc_backup, mu_path), None, {}, {'status' : '_U'}], [os.path.join(wc_backup, H_path), None, {}, {'status' : '_U'}] ] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Create expected disk tree for the update. my_greek_tree = svntest.main.copy_greek_tree() my_greek_tree[2][2]['blue'] = 'azul' # A/mu my_greek_tree[16][2]['red'] = 'rojo' # A/D/H expected_disk_tree = svntest.tree.build_generic_tree(my_greek_tree) # Create expected status tree for the update. status_list = svntest.actions.get_virginal_status_list(wc_backup, '2') for item in status_list: if (item[0] == mu_path) or (item[0] == H_path): item[3]['status'] = '__' expected_status_tree = svntest.tree.build_generic_tree(status_list) # Do the update and check the results in three ways... INCLUDING PROPS return svntest.actions.run_and_verify_update(wc_backup, expected_output_tree, expected_disk_tree, expected_status_tree, None, None, None, None, 1)
[os.path.join(wc_backup, H_path),
[H_path,
def update_props(): "receive properties via update" # Bootstrap sbox = sandbox(update_props) wc_dir = os.path.join (svntest.main.general_wc_dir, sbox) if svntest.actions.make_repo_and_wc(sbox): return 1 # Make a backup copy of the working copy wc_backup = wc_dir + 'backup' svntest.actions.duplicate_dir(wc_dir, wc_backup) # Add a property to a file and a directory mu_path = os.path.join(wc_dir, 'A', 'mu') H_path = os.path.join(wc_dir, 'A', 'D', 'H') svntest.main.run_svn('propset', 'blue', 'azul', mu_path) svntest.main.run_svn('propset', 'red', 'rojo', H_path) # Create expected output tree. output_list = [ [mu_path, None, {}, {'verb' : 'Changing'}], [ H_path, None, {}, {'verb' : 'Changing'}] ] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Created expected status tree. status_list = svntest.actions.get_virginal_status_list(wc_dir, '1') for item in status_list: item[3]['repos_rev'] = '2' # post-commit status if (item[0] == mu_path) or (item[0] == H_path): item[3]['wc_rev'] = '2' item[3]['status'] = '__' expected_status_tree = svntest.tree.build_generic_tree(status_list) # Commit the one file. if svntest.actions.run_and_verify_commit (wc_dir, expected_output_tree, expected_status_tree, None, None, None, None, None, wc_dir): return 1 # Create expected output tree for an update of the wc_backup. output_list = [ [os.path.join(wc_backup, mu_path), None, {}, {'status' : '_U'}], [os.path.join(wc_backup, H_path), None, {}, {'status' : '_U'}] ] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Create expected disk tree for the update. my_greek_tree = svntest.main.copy_greek_tree() my_greek_tree[2][2]['blue'] = 'azul' # A/mu my_greek_tree[16][2]['red'] = 'rojo' # A/D/H expected_disk_tree = svntest.tree.build_generic_tree(my_greek_tree) # Create expected status tree for the update. status_list = svntest.actions.get_virginal_status_list(wc_backup, '2') for item in status_list: if (item[0] == mu_path) or (item[0] == H_path): item[3]['status'] = '__' expected_status_tree = svntest.tree.build_generic_tree(status_list) # Do the update and check the results in three ways... INCLUDING PROPS return svntest.actions.run_and_verify_update(wc_backup, expected_output_tree, expected_disk_tree, expected_status_tree, None, None, None, None, 1)
"unversioned",
"Can't find an entry",
def commit_unversioned_thing(sbox): "committing unversioned object produces error" if sbox.build(): return 1 wc_dir = sbox.wc_dir # Create an unversioned file in the wc. svntest.main.file_append(os.path.join(wc_dir, 'blorg'), "nothing to see") # Commit a non-existent file and *expect* failure: return svntest.actions.run_and_verify_commit (wc_dir, None, None, "unversioned", None, None, None, None, os.path.join(wc_dir,'blorg'))
def make_vardict(_group): vars = {} _cur = libweb100.web100_var_head(_group) while _cur != None: var = Web100Var(_cur, _group) vars[str(var)] = var _cur = libweb100.web100_var_next(_cur) return vars
def make_vardict(_group): vars = {} _cur = libweb100.web100_var_head(_group) while _cur != None: var = Web100Var(_cur, _group) vars[str(var)] = var _cur = libweb100.web100_var_next(_cur) return vars
self.write_vars = make_vardict(self._tune_group) self.read_vars = make_vardict(self._read_group) for (name, var) in self.write_vars.items(): try: self.read_vars[name] except: self.read_vars[name] = var
_cur = libweb100.web100_var_head(self._read_group) while _cur != None: var = Web100Var(_cur, self._read_group) self.read_vars[str(var)] = var _cur = libweb100.web100_var_next(_cur)
def __init__(self, host=None): if (host != None): raise error("Remote agents not supported.") _agent = libweb100.web100_attach(libweb100.WEB100_AGENT_TYPE_LOCAL, None) if _agent == None: libweb100_err() self._agent = _agent self._tune_group = libweb100.web100_group_find(_agent, "tune") if self._tune_group == None: libweb100_err() self._read_group = libweb100.web100_group_find(_agent, "read") if self._read_group == None: libweb100_err() self.write_vars = make_vardict(self._tune_group) self.read_vars = make_vardict(self._read_group) for (name, var) in self.write_vars.items(): try: self.read_vars[name] except: self.read_vars[name] = var self.bufp = libweb100.new_bufp()
try: libweb100.delete_bufp(self.bufp) except: pass
libweb100.delete_bufp(self.bufp)
def __del__(self): try: libweb100.delete_bufp(self.bufp) except: pass
var.valtobuf(val, self.agent.bufp) if libweb100.web100_raw_write(var._var, self._connection, self.agent.bufp) != \
buf = var.valtobuf(val, self.agent.bufp) if libweb100.web100_raw_write(var._var, self._connection, buf) != \
def write(self, name, val): """Write a value to a single variable.""" try: var = self.agent.write_vars[name] except KeyError: raise error("No writable variable '%s' found."%name) var.valtobuf(val, self.agent.bufp) if libweb100.web100_raw_write(var._var, self._connection, self.agent.bufp) != \ libweb100.WEB100_ERR_SUCCESS: libweb100_err()
class Web100ReadLog: def __init__(self, logname): self._log = libweb100.web100_log_open_read(logname) if self._log == None: libweb100_err() self._snap = libweb100.web100_snapshot_alloc_from_log(self._log) if self._snap == None: libweb100_err() self.vars = make_vardict(libweb100.web100_get_log_group(self._log)) self.bufp = libweb100.new_bufp() def __del__(self): libweb100.delete_bufp(self.bufp) def read(self): if libweb100.web100_snap_from_log(self._snap, self._log) != \ libweb100.WEB100_ERR_SUCCESS: return None snap = {} for (name, var) in self.vars.items(): if libweb100.web100_snap_read(var._var, self._snap, self.bufp) != \ libweb100.WEB100_ERR_SUCCESS: libweb100_err() snap[name] = var.val(self.bufp) return snap class Web100WriteLog: def __init__(self, logname, conn, _snap): self.conn = conn self._snap = _snap self._log = libweb100.web100_log_open_write(logname, conn._connection, libweb100.web100_get_snap_group(_snap)) if self._log == None: libweb100_err() def write(self): if libweb100.web100_log_write(self._log, self._snap) != \ libweb100.WEB100_ERR_SUCCESS: libweb100_err()
def write(self, name, val): """Write a value to a single variable.""" try: var = self.agent.write_vars[name] except KeyError: raise error("No writable variable '%s' found."%name) var.valtobuf(val, self.agent.bufp) if libweb100.web100_raw_write(var._var, self._connection, self.agent.bufp) != \ libweb100.WEB100_ERR_SUCCESS: libweb100_err()
elif self._type == libweb100.WEB100_TYPE_INET_ADDRESS_IPV4 or \ self._type == libweb100.WEB100_TYPE_IP_ADDRESS or \ self._type == libweb100.WEB100_TYPE_INET_ADDRESS_IPV6 or \ self._type == libweb100.WEB100_TYPE_INET_ADDRESS:
else:
def val(self, bufp): if self._type == libweb100.WEB100_TYPE_INET_PORT_NUMBER or\ self._type == libweb100.WEB100_TYPE_UNSIGNED16: return libweb100.u16p_value(libweb100.bufp_to_u16p(bufp)) elif self._type == libweb100.WEB100_TYPE_INTEGER or \ self._type == libweb100.WEB100_TYPE_INTEGER32: return libweb100.s32p_value(libweb100.bufp_to_s32p(bufp)) elif self._type == libweb100.WEB100_TYPE_COUNTER32 or \ self._type == libweb100.WEB100_TYPE_GAUGE32 or \ self._type == libweb100.WEB100_TYPE_UNSIGNED32 or \ self._type == libweb100.WEB100_TYPE_TIME_TICKS: return libweb100.u32p_value(libweb100.bufp_to_u32p(bufp)) elif self._type == libweb100.WEB100_TYPE_COUNTER64: return libweb100.u64p_value(libweb100.bufp_to_u64p(bufp)) elif self._type == libweb100.WEB100_TYPE_INET_ADDRESS_IPV4 or \ self._type == libweb100.WEB100_TYPE_IP_ADDRESS or \ self._type == libweb100.WEB100_TYPE_INET_ADDRESS_IPV6 or \ self._type == libweb100.WEB100_TYPE_INET_ADDRESS: return libweb100.web100_value_to_text(self._type, bufp) else: raise error("Unknown Web100 type: %d"%self._type)
else: raise error("Unknown Web100 type: %d"%self._type)
def val(self, bufp): if self._type == libweb100.WEB100_TYPE_INET_PORT_NUMBER or\ self._type == libweb100.WEB100_TYPE_UNSIGNED16: return libweb100.u16p_value(libweb100.bufp_to_u16p(bufp)) elif self._type == libweb100.WEB100_TYPE_INTEGER or \ self._type == libweb100.WEB100_TYPE_INTEGER32: return libweb100.s32p_value(libweb100.bufp_to_s32p(bufp)) elif self._type == libweb100.WEB100_TYPE_COUNTER32 or \ self._type == libweb100.WEB100_TYPE_GAUGE32 or \ self._type == libweb100.WEB100_TYPE_UNSIGNED32 or \ self._type == libweb100.WEB100_TYPE_TIME_TICKS: return libweb100.u32p_value(libweb100.bufp_to_u32p(bufp)) elif self._type == libweb100.WEB100_TYPE_COUNTER64: return libweb100.u64p_value(libweb100.bufp_to_u64p(bufp)) elif self._type == libweb100.WEB100_TYPE_INET_ADDRESS_IPV4 or \ self._type == libweb100.WEB100_TYPE_IP_ADDRESS or \ self._type == libweb100.WEB100_TYPE_INET_ADDRESS_IPV6 or \ self._type == libweb100.WEB100_TYPE_INET_ADDRESS: return libweb100.web100_value_to_text(self._type, bufp) else: raise error("Unknown Web100 type: %d"%self._type)
print cfgtuple
def main(): from twisted.internet import reactor from ConfigParser import SafeConfigParser # Read the configuration file configFile = '/etc/pybal/pybal.conf' config = SafeConfigParser() config.read(configFile) services = {} for section in config.sections(): cfgtuple = ( config.get(section, 'protocol'), config.get(section, 'ip'), config.getint(section, 'port'), config.get(section, 'scheduler')) print cfgtuple services[section] = ipvs.LVSService(section, cfgtuple) crd = Coordinator(services[section], configURL=config.get(section, 'config')) print "Created LVS service '%s'" % section reactor.run()
s = hmac.HMAC(secret, digestmod = MD5)
s = hmac.HMAC(secret, digestmod = SHA256)
def gen_hmac(secret, ip): epoch_mins = (long)(time()/60) s = hmac.HMAC(secret, digestmod = MD5) s.update(socket.inet_aton(socket.gethostbyname(ip))) s.update(struct.pack("i", epoch_mins)) # "i" is for integer print s.hexdigest()
r" *use +(?P<module>[a-zA-Z_][a-zA-Z_0-9]*)(?P<only> *, *only:)? *(?P<imports>.*)$",
r" *use +(?P<module>[a-zA-Z_][a-zA-Z_0-9]*)(?P<only> *, *only *:)? *(?P<imports>.*)$",
def parseUse(inFile): """Parses the use statements in inFile The parsing stops at the first non use statement. Returns something like: ([{'module':'module1','only':['el1',el2=>el3']},...], '! comment1\\n!comment2...\\n', 'last line (the line that stopped the parsing)') """ useStartRe=re.compile( r" *(?P<use>use[^&!]*)(?P<continue>&?) *(?P<comment>!.*)?$", flags=re.IGNORECASE) commentRe=re.compile(r" *!.*$") contLineRe=re.compile( r"(?P<contLine>[^&!]*)(?P<continue>&?) *(?P<comment>!.*)?$") useParseRe=re.compile( r" *use +(?P<module>[a-zA-Z_][a-zA-Z_0-9]*)(?P<only> *, *only:)? *(?P<imports>.*)$", flags=re.IGNORECASE) lineNr=0 comments="" modules=[] line="" while 1: line=inFile.readline() lineNr=lineNr+1 if not line: break m=useStartRe.match(line) if m: # read whole use compactedUse=m.group('use') useComments="" if m.group('comment'): useComments=m.group('comment')+'\n' while m.group('continue'): lineNr=lineNr+1 m=contLineRe.match(inFile.readline()) compactedUse=compactedUse+m.group('contLine') if m.group('comment'): useComments=useComments+m.group('comment')+'\n' # parse use m=useParseRe.match(compactedUse) if not m: raise SyntaxError("could not parse use ending at line "+ str(lineNr)+" (compactedUse="+compactedUse+ ")") useAtt={'module':m.group('module')} if m.group('only'): useAtt['only']=map(string.strip, string.split(m.group('imports'),',')) else: useAtt['renames']=map(string.strip, string.split(m.group('imports'),',')) if useComments : useAtt['comments']=useComments # add use to modules modules.append(useAtt) elif commentRe.match(line): comments=comments+line elif line and not line.isspace(): break return (modules,comments,line)
elif m.has_key('renames'): outFile.write(" USE "+m['module']+","+ string.ljust("",38)) if m['renames']: outFile.write(m['renames'][0]) for i in range(1,len(m['renames'])): write(",&\n"+string.ljust("",45)+m['renames'][i])
else: outFile.write(" USE "+m['module']) if m.has_key('renames') and m['renames']: outFile.write(","+string.ljust("",38)+ m['renames'][0]) for i in range(1,len(m['renames'])): outFile.write(",&\n"+string.ljust("",45)+m['renames'][i])
def writeUseLong(modules,outFile): for m in modules: if m.has_key('only'): outFile.write(" USE "+m['module']+","+ string.rjust('ONLY: ',38-len(m['module']))) if m['only']: outFile.write(m['only'][0]) for i in range(1,len(m['only'])): outFile.write(",&\n"+string.ljust("",45)+m['only'][i]) elif m.has_key('renames'): outFile.write(" USE "+m['module']+","+ string.ljust("",38)) if m['renames']: outFile.write(m['renames'][0]) for i in range(1,len(m['renames'])): write(",&\n"+string.ljust("",45)+m['renames'][i]) if m.has_key('comments'): for commt in m['comments']: outfile.write("&\n"+m['comments'][i]) outfile.write("\n")
outFile.write("\n") outFile.write('\n'.join(m['comments']))
file.write("\n") file.write('\n'.join(m['comments']))
def writeUseShort(modules,file): """Writes a declaration in a compact way""" for m in modules: uLine=[] if m.has_key('only'): uLine.append(" USE "+m['module']+", ONLY: ") for k in m['only'][:-1]: uLine.append(k+", ") uLine.append(m['only'][-1]) elif m.has_key('renames') and m['renames']: uLine.append(" USE "+m['module']+", ") for k in m['renames'][:-1]: uLine.append(k+", ") uLine.append(m['renames'][-1]) else: uLine.append(" USE "+m['module']) writeInCols(uLine,7,79,0,file) if m['comments']: outFile.write("\n") outFile.write('\n'.join(m['comments'])) file.write("\n")
nonWordRe=re.compile(r"(\W)")
nonWordRe=re.compile(r"([^a-zA-Z0-9_.])")
def writeInCols(dLine,indentCol,maxCol,indentAtt,file): """writes out the strings (trying not to cut them) in dLine up to maxCol indenting each newline with indentCol. The '&' of the continuation line is at maxCol. indentAtt is the actual intent, and the new indent is returned""" strRe=re.compile(r"('[^'\n]*'|\"[^\"\n]*\")") nonWordRe=re.compile(r"(\W)") maxSize=maxCol-indentCol-1 tol=min(maxSize/6,6)+indentCol for fragment in dLine: if indentAtt+len(fragment)<maxCol: file.write(fragment) indentAtt+=len(fragment) elif len(fragment.lstrip())<=maxSize: file.write("&\n"+(" "*indentCol)) file.write(fragment.lstrip()) indentAtt=indentCol+len(fragment.lstrip()) else: sPieces=strRe.split(fragment) for sPiece in sPieces: if sPiece and (not (sPiece[0]=='"' or sPiece[0]=="'")): subPieces=nonWordRe.split(sPiece) else: subPieces=[sPiece] for subPiece in subPieces: if indentAtt==indentCol: file.write(subPiece.lstrip()) indentAtt+=len(subPiece.lstrip()) elif indentAtt<tol or indentAtt+len(subPiece)<maxCol: file.write(subPiece) indentAtt+=len(subPiece) else: file.write("&\n"+(" "*indentCol)) file.write(subPiece.lstrip()) indentAtt=indentCol+len(subPiece.lstrip()) return indentAtt
try: prettfyInplace(fileName,bkDir, normalize_use=defaultsDict['normalize-use'], upcase_keywords=defaultsDict['upcase'], interfaces_dir=defaultsDict['interface-dir'], replace=defaultsDict['replace']) except: import traceback sys.stdout.write('-'*60+"\n") traceback.print_exc(file=sys.stdout) sys.stdout.write('-'*60+"\n") sys.stdout.write("Processing file '"+fileName+"'\n")
prettfyInplace(fileName,bkDir, normalize_use=defaultsDict['normalize-use'], upcase_keywords=defaultsDict['upcase'], interfaces_dir=defaultsDict['interface-dir'], replace=defaultsDict['replace'])
def prettfyInplace(fileName,bkDir="preprettify",normalize_use=1, upcase_keywords=1, interfaces_dir=None, replace=None,logFile=sys.stdout): """Same as prettify, but inplace, replaces only if needed""" if not os.path.exists(bkDir): os.mkdir(bkDir) if not os.path.isdir(bkDir): raise Error("bk-dir must be a directory, was "+bkDir) infile=open(fileName,'r') outfile=prettifyFile(infile, normalize_use, upcase_keywords, interfaces_dir, replace) if (infile==outfile): return infile.seek(0) outfile.seek(0) same=1 while 1: l1=outfile.readline() l2=infile.readline() if (l1!=l2): same=0 break if not l1: break if (not same): bkName=os.path.join(bkDir,os.path.basename(fileName)) bName=bkName i=0 while os.path.exists(bkName): i+=1 bkName=bName+"."+str(i) infile.seek(0) bkFile=file(bkName,"w") while 1: l1=infile.readline() if not l1: break bkFile.write(l1) bkFile.close() outfile.seek(0) newFile=file(fileName,'w') while 1: l1=outfile.readline() if not l1: break newFile.write(l1) newFile.close() infile.close() outfile.close()
nullifys="".join(nullifyRe.findall(rest))
nullifys=",".join(nullifyRe.findall(rest))
def cleanDeclarations(routine,logFile=sys.stdout): """cleans up the declaration part of the given parsed routine removes unused variables""" global rVar commentToRemoveRe=re.compile(r" *! *(?:interface|arguments|parameters|locals?|\** *local +variables *\**|\** *local +parameters *\**) *$",re.IGNORECASE) nullifyRe=re.compile(r" *nullify *\(([^()]+)\) *\n?",re.IGNORECASE|re.MULTILINE) if not routine['kind']: return if (routine['core'] and re.match(" *type *[a-zA-Z_]+ *$",routine['core'][0],re.IGNORECASE)): logFile.write("*** routine %s contains local types, not fully cleaned ***\n"% (routine['name'])) if re.search("^#","".join(routine['declarations']),re.MULTILINE): logFile.write("*** routine %s declarations contain preprocessor directives ***\n*** declarations not cleaned ***\n"%( routine['name'])) return try: rest="".join(routine['strippedCore']).lower() nullifys="".join(nullifyRe.findall(rest)) rest=nullifyRe.sub("",rest) paramDecl=[] decls=[] for d in routine['parsedDeclarations']: d['normalizedType']=d['type'] if d['parameters']: d['normalizedType']+=d['parameters'] if (d["attributes"]): d['attributes'].sort(lambda x,y:cmp(x.lower(),y.lower())) d['normalizedType']+=', ' d['normalizedType']+=', '.join(d['attributes']) if "parameter" in map(str.lower,d['attributes']): paramDecl.append(d) else: decls.append(d) sortDeclarations(paramDecl) sortDeclarations(decls) has_routinen=0 pos_routinep=-1 for d in paramDecl: for i in xrange(len(d['vars'])): v=d['vars'][i] m=varRe.match(v) lowerV=m.group("var").lower() if lowerV=="routinen": has_routinen=1 d['vars'][i]="routineN = '"+routine['name']+"'" elif lowerV=="routinep": pos_routinep=i d['vars'][i]="routineP = moduleN//':'//routineN" if not has_routinen and pos_routinep>=0: d['vars'].insert(pos_routinep,"routineN = '"+routine['name']+"'") if routine['arguments']: routine['lowercaseArguments']=map(lambda x:x.lower(),routine['arguments']) else: routine['lowercaseArguments']=[] if routine['result']: routine['lowercaseArguments'].append(routine['result'].lower()) argDeclDict={} localDecl=[] for d in decls: localD={} localD.update(d) localD['vars']=[] argD=None for v in d['vars']: m=varRe.match(v) lowerV=m.group("var").lower() if lowerV in routine['lowercaseArguments']: argD={} argD.update(d) argD['vars']=[v] if argDeclDict.has_key(lowerV): raise SyntaxError( "multiple declarations not supported. 
var="+v+ " declaration="+str(d)+"routine="+routine['name']) argDeclDict[lowerV]=argD else: pos=findWord(lowerV,rest) if (pos!=-1): localD['vars'].append(v) else: if findWord(lowerV,nullifys)!=-1: if not rmNullify(lowerV,routine['core']): raise SyntaxError( "could not remove nullify of "+lowerV+ " as expected, routine="+routine['name']) logFile.write("removed var %s in routine %s\n" % (lowerV,routine['name'])) rVar+=1 if (len(localD['vars'])): localDecl.append(localD) argDecl=[] for arg in routine['lowercaseArguments']: argDecl.append(argDeclDict[arg]) if routine['kind'].lower()=='function': aDecl=argDecl[:-1] else: aDecl=argDecl isOptional=0 for arg in aDecl: attIsOptional= ("optional" in map(lambda x:x.lower(), arg['attributes'])) if isOptional and not attIsOptional: logFile.write("*** warning non optional args %s after optional in routine %s\n" %( repr(arg['vars']),routine['name'])) if attIsOptional: isOptional=1 enforceDeclDependecies(argDecl) newDecl=StringIO() for comment in routine['preDeclComments']: if not commentToRemoveRe.match(comment): newDecl.write(comment) newDecl.writelines(routine['use']) writeDeclarations(argDecl,newDecl) if argDecl and paramDecl: newDecl.write("\n") writeDeclarations(paramDecl,newDecl) if (argDecl or paramDecl) and localDecl: newDecl.write("\n") writeDeclarations(localDecl,newDecl) if argDecl or paramDecl or localDecl: newDecl.write("\n") wrote=0 for comment in routine['declComments']: if not commentToRemoveRe.match(comment): newDecl.write(comment) newDecl.write("\n") wrote=1 if wrote: newDecl.write("\n") routine['declarations']=[newDecl.getvalue()] except: if routine.has_key('name'): logFile.write("**** exception cleaning routine "+routine['name']+" ****") logFile.write("parsedDeclartions="+str(routine['parsedDeclarations'])) raise
'g4x6':{'atoms':[3,4,5,6],'distances':[7,8,9,10,11],'molecule':[2]},
'g4x6':{'atoms':[3,4,5,6],'distances':[7,8,9,10,11,12],'molecule':[2]},
def cons_conv(oldInput,oldSect,newInput,new_sect,conv): del new_sect.auto_g new_sect.pre_comments=oldSect.pre_comments new_sect.post_comments=oldSect.post_comments consTypes={'g3x3':{'atoms':[3,4,5],'distances':[6,7,8],'molecule':[2]}, 'g4x6':{'atoms':[3,4,5,6],'distances':[7,8,9,10,11],'molecule':[2]}, 'dist':{'atoms':[3,4],'distance':[5],'molecule':[2]}} for line in oldSect.raw_lines: if line.isspace():continue ll=line.split() sname=ll[0].lower() if sname.lower()=='dist': s=guaranteePath(new_sect,conv.upcase("internals")) sAtt=Section(conv.upcase('distance')) s[-1].add_subsection(sAtt) else: sAtt=Section(conv.upcase(sname)) new_sect.add_subsection(sAtt) convAtt=consTypes[sname] for (k,v) in convAtt.iteritems(): kw=Keyword(conv.upcase(k),map(lambda x:ll[x],v)) sAtt.add_keyword(kw)
if line.split()[0].lower()=="end":break
ll=line.split() if ll[0].lower()=="end":break
def ff_conv(oldInput,oldSect,newInput,new_sect,conv): del new_sect.auto_g new_sect.pre_comments=oldSect.pre_comments new_sect.post_comments=oldSect.post_comments l_nr=0 nl=len(oldSect.raw_lines) while 1: if l_nr>=nl: break line=oldSect.raw_lines[l_nr] ll=line.strip().lower() if ll=="charges": while 1: l_nr+=1 if l_nr>=nl: break line=oldSect.raw_lines[l_nr] if line.split()[0].lower()=="end":break ch=Section(conv.upcase("charge")) ch.add_keyword(Keyword(conv.upcase("atom"), values=[line.split()[0]])) ch.add_keyword(Keyword(conv.upcase("charge"), values=[line.split()[1]])) new_sect.add_subsection(ch) elif ll=="bends": while 1: l_nr+=1 if l_nr>=nl: break line=oldSect.raw_lines[l_nr] if line.split()[0].lower()=="end":break ch=Section(conv.upcase("bend")) ch.add_keyword(Keyword(conv.upcase("atoms"),values=line.split()[1:4])) ch.add_keyword(Keyword(conv.upcase("k"),values=[line.split()[4]])) ch.add_keyword(Keyword(conv.upcase("theta0"),values=[line.split()[5]])) new_sect.add_subsection(ch) elif ll=="bonds": while 1: l_nr+=1 if l_nr>=nl: break line=oldSect.raw_lines[l_nr] if line.split()[0].lower()=="end":break ch=Section(conv.upcase("bond")) ch.add_keyword(Keyword(conv.upcase("atoms"),values=line.split()[1:3])) ch.add_keyword(Keyword(conv.upcase("k"),values=[line.split()[3]])) ch.add_keyword(Keyword(conv.upcase("r0"),values=[line.split()[4]])) new_sect.add_subsection(ch) elif ll.split()[0]=="parmfile": new_sect.add_keyword(Keyword("parmfile",[line.split()[2]])) new_sect.add_keyword(Keyword("parmtype",[line.split()[1]])) elif ll.split()[0]=="ei_scale": new_sect.add_keyword(Keyword("ei_scale14",[line.split()[1]])) elif ll.split()[0]=="vdw_scale": new_sect.add_keyword(Keyword("vdw_scale14",[line.split()[1]])) elif ll.split()[0]=="rcut_nb": new_sect.add_keyword(Keyword("rcut_nb",[line.split()[1]])) elif ll.split()[0]=="nonbonded": ss=Section(conv.upcase("nonbonded")) new_sect.add_subsection(ss) f_data={'lennard-jones':{3:'epsilon',4:'sigma',5:'rcut'}, 'bmhft':{3:'rcut'},'eam':{3:'parmfile'},'ipbv':{3:'rcut'}, 'williams':{3:'a',4:'b',5:'c',6:'rcut'}} while 1: l_nr+=1 if l_nr>=nl: break line=oldSect.raw_lines[l_nr] if line.split()[0].lower()=="end":break l=line.split() sname=l[0].lower() ch=Section(conv.upcase(l[0])) ch.add_keyword(Keyword("atom",l[1:3])) for idx in f_data[sname]: kname=f_data[sname][idx] ch.add_keyword(Keyword(conv.upcase(kname),values=[l[idx]])) ss.add_subsection(ch) else: print "WARNING ignoring line ",repr(line),"in forcefield section" l_nr+=1
ch.add_keyword(Keyword(conv.upcase("atoms"),values=line.split()[1:3])) ch.add_keyword(Keyword(conv.upcase("k"),values=[line.split()[3]])) ch.add_keyword(Keyword(conv.upcase("r0"),values=[line.split()[4]]))
ch.add_keyword(Keyword(conv.upcase("atoms"),values=ll[1:3])) if ll[0].lower()=="harmonic": ch.add_keyword(Keyword(conv.upcase("k"),values=[ll[3]])) ch.add_keyword(Keyword(conv.upcase("r0"),values=[line.split()[4]])) elif ll[0].lower()=="quartic": ch.add_keyword(Keyword(conv.upcase("k"),values=ll[3:6])) ch.add_keyword(Keyword(conv.upcase("r0"),values=[line.split()[6]])) else: print "WARNING unknown bond type in forcefield section:",ll[0]
def ff_conv(oldInput,oldSect,newInput,new_sect,conv): del new_sect.auto_g new_sect.pre_comments=oldSect.pre_comments new_sect.post_comments=oldSect.post_comments l_nr=0 nl=len(oldSect.raw_lines) while 1: if l_nr>=nl: break line=oldSect.raw_lines[l_nr] ll=line.strip().lower() if ll=="charges": while 1: l_nr+=1 if l_nr>=nl: break line=oldSect.raw_lines[l_nr] if line.split()[0].lower()=="end":break ch=Section(conv.upcase("charge")) ch.add_keyword(Keyword(conv.upcase("atom"), values=[line.split()[0]])) ch.add_keyword(Keyword(conv.upcase("charge"), values=[line.split()[1]])) new_sect.add_subsection(ch) elif ll=="bends": while 1: l_nr+=1 if l_nr>=nl: break line=oldSect.raw_lines[l_nr] if line.split()[0].lower()=="end":break ch=Section(conv.upcase("bend")) ch.add_keyword(Keyword(conv.upcase("atoms"),values=line.split()[1:4])) ch.add_keyword(Keyword(conv.upcase("k"),values=[line.split()[4]])) ch.add_keyword(Keyword(conv.upcase("theta0"),values=[line.split()[5]])) new_sect.add_subsection(ch) elif ll=="bonds": while 1: l_nr+=1 if l_nr>=nl: break line=oldSect.raw_lines[l_nr] if line.split()[0].lower()=="end":break ch=Section(conv.upcase("bond")) ch.add_keyword(Keyword(conv.upcase("atoms"),values=line.split()[1:3])) ch.add_keyword(Keyword(conv.upcase("k"),values=[line.split()[3]])) ch.add_keyword(Keyword(conv.upcase("r0"),values=[line.split()[4]])) new_sect.add_subsection(ch) elif ll.split()[0]=="parmfile": new_sect.add_keyword(Keyword("parmfile",[line.split()[2]])) new_sect.add_keyword(Keyword("parmtype",[line.split()[1]])) elif ll.split()[0]=="ei_scale": new_sect.add_keyword(Keyword("ei_scale14",[line.split()[1]])) elif ll.split()[0]=="vdw_scale": new_sect.add_keyword(Keyword("vdw_scale14",[line.split()[1]])) elif ll.split()[0]=="rcut_nb": new_sect.add_keyword(Keyword("rcut_nb",[line.split()[1]])) elif ll.split()[0]=="nonbonded": ss=Section(conv.upcase("nonbonded")) new_sect.add_subsection(ss) f_data={'lennard-jones':{3:'epsilon',4:'sigma',5:'rcut'}, 'bmhft':{3:'rcut'},'eam':{3:'parmfile'},'ipbv':{3:'rcut'}, 'williams':{3:'a',4:'b',5:'c',6:'rcut'}} while 1: l_nr+=1 if l_nr>=nl: break line=oldSect.raw_lines[l_nr] if line.split()[0].lower()=="end":break l=line.split() sname=l[0].lower() ch=Section(conv.upcase(l[0])) ch.add_keyword(Keyword("atom",l[1:3])) for idx in f_data[sname]: kname=f_data[sname][idx] ch.add_keyword(Keyword(conv.upcase(kname),values=[l[idx]])) ss.add_subsection(ch) else: print "WARNING ignoring line ",repr(line),"in forcefield section" l_nr+=1
ch.add_keyword(Keyword("atom",l[1:3]))
ch.add_keyword(Keyword("atoms",l[1:3]))
def ff_conv(oldInput,oldSect,newInput,new_sect,conv): del new_sect.auto_g new_sect.pre_comments=oldSect.pre_comments new_sect.post_comments=oldSect.post_comments l_nr=0 nl=len(oldSect.raw_lines) while 1: if l_nr>=nl: break line=oldSect.raw_lines[l_nr] ll=line.strip().lower() if ll=="charges": while 1: l_nr+=1 if l_nr>=nl: break line=oldSect.raw_lines[l_nr] if line.split()[0].lower()=="end":break ch=Section(conv.upcase("charge")) ch.add_keyword(Keyword(conv.upcase("atom"), values=[line.split()[0]])) ch.add_keyword(Keyword(conv.upcase("charge"), values=[line.split()[1]])) new_sect.add_subsection(ch) elif ll=="bends": while 1: l_nr+=1 if l_nr>=nl: break line=oldSect.raw_lines[l_nr] if line.split()[0].lower()=="end":break ch=Section(conv.upcase("bend")) ch.add_keyword(Keyword(conv.upcase("atoms"),values=line.split()[1:4])) ch.add_keyword(Keyword(conv.upcase("k"),values=[line.split()[4]])) ch.add_keyword(Keyword(conv.upcase("theta0"),values=[line.split()[5]])) new_sect.add_subsection(ch) elif ll=="bonds": while 1: l_nr+=1 if l_nr>=nl: break line=oldSect.raw_lines[l_nr] if line.split()[0].lower()=="end":break ch=Section(conv.upcase("bond")) ch.add_keyword(Keyword(conv.upcase("atoms"),values=line.split()[1:3])) ch.add_keyword(Keyword(conv.upcase("k"),values=[line.split()[3]])) ch.add_keyword(Keyword(conv.upcase("r0"),values=[line.split()[4]])) new_sect.add_subsection(ch) elif ll.split()[0]=="parmfile": new_sect.add_keyword(Keyword("parmfile",[line.split()[2]])) new_sect.add_keyword(Keyword("parmtype",[line.split()[1]])) elif ll.split()[0]=="ei_scale": new_sect.add_keyword(Keyword("ei_scale14",[line.split()[1]])) elif ll.split()[0]=="vdw_scale": new_sect.add_keyword(Keyword("vdw_scale14",[line.split()[1]])) elif ll.split()[0]=="rcut_nb": new_sect.add_keyword(Keyword("rcut_nb",[line.split()[1]])) elif ll.split()[0]=="nonbonded": ss=Section(conv.upcase("nonbonded")) new_sect.add_subsection(ss) f_data={'lennard-jones':{3:'epsilon',4:'sigma',5:'rcut'}, 'bmhft':{3:'rcut'},'eam':{3:'parmfile'},'ipbv':{3:'rcut'}, 'williams':{3:'a',4:'b',5:'c',6:'rcut'}} while 1: l_nr+=1 if l_nr>=nl: break line=oldSect.raw_lines[l_nr] if line.split()[0].lower()=="end":break l=line.split() sname=l[0].lower() ch=Section(conv.upcase(l[0])) ch.add_keyword(Keyword("atom",l[1:3])) for idx in f_data[sname]: kname=f_data[sname][idx] ch.add_keyword(Keyword(conv.upcase(kname),values=[l[idx]])) ss.add_subsection(ch) else: print "WARNING ignoring line ",repr(line),"in forcefield section" l_nr+=1
nrRe=re.compile("[-+]?[0-9]*\\.?[0-9]+([eEdD][-+]?[0-9]+)?")
nrRe=re.compile("[-+]?[0-9]*\\.?[0-9]+([eEdD][-+]?[0-9]+)?$")
def diffEpsilon(str1, str2,incomparable_val=1): """returns the difference between two strings, parsing numbers and comparing them.""" import re nrRe=re.compile("[-+]?[0-9]*\\.?[0-9]+([eEdD][-+]?[0-9]+)?") tokens1=str1.split() tokens2=str2.split() distance=0.0 if len(tokens1)!=len(tokens2): return incomparable_val i=0 for t1 in tokens1: t2=tokens2[i] i=i+1 if (t1!=t2): if nrRe.match(t1) and nrRe.match(t2): (f1,f2)=(float(t1),float(t2)) distance=max(distance, compareNr(f1,f2)) else: return incomparable_val return distance
alive = Alive()
def eekMexLogging(): # ------------------------------------------------------------ # Base Logging Setup # ------------------------------------------------------------ logging.basicConfig(level=logging.INFO, format='%(asctime)s %(name)-2s %(module)-10s %(levelname)-4s %(message)s', filename='eekmex.log', filemode='a') # ------------------------------------------------------------ # Logging Handlers # ------------------------------------------------------------ loggerConsole = logging.StreamHandler() loggerConsole.setLevel(logging.INFO) loggerFile = logging.FileHandler('/media/sdcard/eekmex.log', 'a') loggerFile.setLevel(logging.INFO) loggerFileGoogleEarth = logging.FileHandler('/media/sdcard/eekmexprekml.log', 'a') loggerFileGoogleEarth.setLevel(logging.WARNING) # ------------------------------------------------------------ # Logging Formatters # ------------------------------------------------------------ loggerConsoleFormatter = logging.Formatter('%(name)-2s: %(module)-10s %(levelname)-4s %(message)s') loggerConsole.setFormatter(loggerConsoleFormatter) loggerFileFormatter = logging.Formatter('%(asctime)s %(name)-2s %(module)-10s %(levelname)-4s %(message)s') loggerFile.setFormatter(loggerFileFormatter) loggerFileGoogleEarthFormatter = logging.Formatter('%(process)d %(asctime)s %(message)s', datefmt="%d %m %Y %H %M %S ") loggerFileGoogleEarth.setFormatter(loggerFileGoogleEarthFormatter) # ------------------------------------------------------------ # Logging Handlers # ------------------------------------------------------------ logging.getLogger('').addHandler(loggerConsole) logging.getLogger('').addHandler(loggerFile) logging.getLogger('').addHandler(loggerFileGoogleEarth)
alive.data()
def eekMexLogging(): # ------------------------------------------------------------ # Base Logging Setup # ------------------------------------------------------------ logging.basicConfig(level=logging.INFO, format='%(asctime)s %(name)-2s %(module)-10s %(levelname)-4s %(message)s', filename='eekmex.log', filemode='a') # ------------------------------------------------------------ # Logging Handlers # ------------------------------------------------------------ loggerConsole = logging.StreamHandler() loggerConsole.setLevel(logging.INFO) loggerFile = logging.FileHandler('/media/sdcard/eekmex.log', 'a') loggerFile.setLevel(logging.INFO) loggerFileGoogleEarth = logging.FileHandler('/media/sdcard/eekmexprekml.log', 'a') loggerFileGoogleEarth.setLevel(logging.WARNING) # ------------------------------------------------------------ # Logging Formatters # ------------------------------------------------------------ loggerConsoleFormatter = logging.Formatter('%(name)-2s: %(module)-10s %(levelname)-4s %(message)s') loggerConsole.setFormatter(loggerConsoleFormatter) loggerFileFormatter = logging.Formatter('%(asctime)s %(name)-2s %(module)-10s %(levelname)-4s %(message)s') loggerFile.setFormatter(loggerFileFormatter) loggerFileGoogleEarthFormatter = logging.Formatter('%(process)d %(asctime)s %(message)s', datefmt="%d %m %Y %H %M %S ") loggerFileGoogleEarth.setFormatter(loggerFileGoogleEarthFormatter) # ------------------------------------------------------------ # Logging Handlers # ------------------------------------------------------------ logging.getLogger('').addHandler(loggerConsole) logging.getLogger('').addHandler(loggerFile) logging.getLogger('').addHandler(loggerFileGoogleEarth)
self.backend = Backend(self.uri[1:].split('/')[0], self.factory ("http://" + self.uri[1:].split('/')[0]))
self.backend = Backend(self.uri[1:].split('/')[0], self.factory, ("http://" + self.uri[1:].split('/')[0],))
def process(self): """ Each new request begins processing here """ log.debug("Request: " + self.method + " " + self.uri); # Clean up URL self.uri = self.simplify_path(self.uri)
if log.isEnabled('apt'): self.cache = apt_pkg.GetCache() else: self.__fake_stdout() self.cache = apt_pkg.GetCache() self.__restore_stdout()
self.cache = apt_pkg.GetCache()
def load(self): """ Regenerates the fake configuration and load the packages server. """ if not self.loaded: shutil.rmtree(self.status_dir+'/apt/lists/') os.makedirs(self.status_dir+'/apt/lists/partial') sources = open(self.status_dir+'/'+'apt/etc/sources.list', 'w') for file in self.packages.keys(): # we should probably clear old entries from self.packages and # take into account the recorded mtime as optimization fake_uri='http://apt-proxy:'+file source_line='deb '+dirname(fake_uri)+'/ /' listpath=(self.status_dir+'/apt/lists/' +apt_pkg.URItoFileName(fake_uri)) sources.write(source_line+'\n')
def import_directory(factory, dir, recursive=0): """ Import all files in a given directory into the cache This is used by apt-proxy-import to import new files into the cache """ if not os.path.exists(dir): log.err('Directory ' + dir + ' does not exist', 'import') return if recursive: log.debug("Importing packages from directory tree: " + dir, 'import') for root, dirs, files in os.walk(dir): for file in files: import_file(factory, root, file) else: log.debug("Importing packages from directory: " + dir, 'import') for file in os.listdir(dir): mode = os.stat(dir + '/' + file)[stat.ST_MODE] if not stat.S_ISDIR(mode): import_file(factory, dir, file) for backend in factory.backends: backend.packages.unload()
def compare(a, b): return apt_pkg.VersionCompare(a[0], b[0])
def import_debs(factory, dir): if not os.path.exists(dir): os.makedirs(dir) for file in os.listdir(dir): if file[-4:]!='.deb': log.msg("IGNORING:"+ file, 'import') continue log.msg("considering:"+ dir+'/'+file, 'import') paths = get_mirror_path(factory, dir+'/'+file) if paths: if len(paths) != 1: log.msg("WARNING: multiple ocurrences", 'import') log.msg(str(paths), 'import') path = paths[0]
def import_file(factory, dir, file): """ Import a .deb into cache from given filename """ if file[-4:]!='.deb': log.msg("Ignoring (unknown file type):"+ file, 'import') return log.debug("considering: " + dir + '/' + file, 'import') paths = get_mirror_path(factory, dir+'/'+file) if paths: if len(paths) != 1: log.debug("WARNING: multiple ocurrences", 'import') log.debug(str(paths), 'import') cache_path = paths[0] else: log.debug("Not found, trying to guess", 'import') cache_path = closest_match(AptDpkgInfo(dir+'/'+file), get_mirror_versions(factory, dir+'/'+file)) if cache_path: log.debug("MIRROR_PATH:"+ cache_path, 'import') src_path = dir+'/'+file dest_path = factory.cache_dir+cache_path if not os.path.exists(dest_path): log.debug("IMPORTING:" + src_path, 'import') dest_path = re.sub(r'/\./', '/', dest_path) if not os.path.exists(dirname(dest_path)): os.makedirs(dirname(dest_path)) f = open(dest_path, 'w') fcntl.lockf(f.fileno(), fcntl.LOCK_EX) f.truncate(0) shutil.copy2(src_path, dest_path) f.close() if hasattr(factory, 'access_times'): atime = os.stat(src_path)[stat.ST_ATIME] factory.access_times[cache_path] = atime log.msg(file + ' imported', 'import')
def import_debs(factory, dir): if not os.path.exists(dir): os.makedirs(dir) for file in os.listdir(dir): if file[-4:]!='.deb': log.msg("IGNORING:"+ file, 'import') continue log.msg("considering:"+ dir+'/'+file, 'import') paths = get_mirror_path(factory, dir+'/'+file) if paths: if len(paths) != 1: log.msg("WARNING: multiple ocurrences", 'import') log.msg(str(paths), 'import') path = paths[0] else: log.msg("Not found, trying to guess", 'import') path = closest_match(AptDpkgInfo(dir+'/'+file), get_mirror_versions(factory, dir+'/'+file)) if path: log.msg("MIRROR_PATH:"+ path, 'import') spath = dir+'/'+file dpath = factory.cache_dir+path if not os.path.exists(dpath): log.msg("IMPORTING:"+spath, 'import') dpath = re.sub(r'/\./', '/', dpath) if not os.path.exists(dirname(dpath)): os.makedirs(dirname(dpath)) f = open(dpath, 'w') fcntl.lockf(f.fileno(), fcntl.LOCK_EX) f.truncate(0) shutil.copy2(spath, dpath) f.close() if hasattr(factory, 'access_times'): atime = os.stat(spath)[stat.ST_ATIME] factory.access_times[path] = atime for backend in factory.backends: backend.packages.unload()
log.msg("Not found, trying to guess", 'import') path = closest_match(AptDpkgInfo(dir+'/'+file), get_mirror_versions(factory, dir+'/'+file)) if path: log.msg("MIRROR_PATH:"+ path, 'import') spath = dir+'/'+file dpath = factory.cache_dir+path if not os.path.exists(dpath): log.msg("IMPORTING:"+spath, 'import') dpath = re.sub(r'/\./', '/', dpath) if not os.path.exists(dirname(dpath)): os.makedirs(dirname(dpath)) f = open(dpath, 'w') fcntl.lockf(f.fileno(), fcntl.LOCK_EX) f.truncate(0) shutil.copy2(spath, dpath) f.close() if hasattr(factory, 'access_times'): atime = os.stat(spath)[stat.ST_ATIME] factory.access_times[path] = atime for backend in factory.backends: backend.packages.unload()
log.msg(file + ' skipped - already in cache', 'import') else: log.msg(file + ' skipped - no suitable backend found', 'import')
def import_debs(factory, dir): if not os.path.exists(dir): os.makedirs(dir) for file in os.listdir(dir): if file[-4:]!='.deb': log.msg("IGNORING:"+ file, 'import') continue log.msg("considering:"+ dir+'/'+file, 'import') paths = get_mirror_path(factory, dir+'/'+file) if paths: if len(paths) != 1: log.msg("WARNING: multiple ocurrences", 'import') log.msg(str(paths), 'import') path = paths[0] else: log.msg("Not found, trying to guess", 'import') path = closest_match(AptDpkgInfo(dir+'/'+file), get_mirror_versions(factory, dir+'/'+file)) if path: log.msg("MIRROR_PATH:"+ path, 'import') spath = dir+'/'+file dpath = factory.cache_dir+path if not os.path.exists(dpath): log.msg("IMPORTING:"+spath, 'import') dpath = re.sub(r'/\./', '/', dpath) if not os.path.exists(dirname(dpath)): os.makedirs(dirname(dpath)) f = open(dpath, 'w') fcntl.lockf(f.fileno(), fcntl.LOCK_EX) f.truncate(0) shutil.copy2(spath, dpath) f.close() if hasattr(factory, 'access_times'): atime = os.stat(spath)[stat.ST_ATIME] factory.access_times[path] = atime for backend in factory.backends: backend.packages.unload()
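The import helpers above share one pattern: resolve the package's mirror path (via get_mirror_path or closest_match), then copy the .deb into the cache under an exclusive lock so a concurrent fetch never sees a half-written file. A stripped-down sketch of just that copy step; src_path and dest_path are hypothetical stand-ins for the values the real code derives:

import fcntl, os, shutil, stat
from os.path import dirname

def copy_into_cache(src_path, dest_path):
    if os.path.exists(dest_path):
        return None                                # already cached, nothing to do
    if not os.path.exists(dirname(dest_path)):
        os.makedirs(dirname(dest_path))
    f = open(dest_path, 'w')
    fcntl.lockf(f.fileno(), fcntl.LOCK_EX)         # hold an exclusive lock while the copy runs
    f.truncate(0)
    shutil.copy2(src_path, dest_path)              # copies the data and preserves the mtime
    f.close()                                      # closing the descriptor releases the lock
    return os.stat(src_path)[stat.ST_ATIME]        # callers may record this as the access time

# hypothetical paths, mirroring the dir+'/'+file and factory.cache_dir+path values above
copy_into_cache('/tmp/foo_1.0_i386.deb',
                '/var/cache/apt-proxy/debian/pool/main/f/foo/foo_1.0_i386.deb')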
log.debug('Opening database ' + filename)
def open_shelve(filename): from bsddb3 import db,dbshelve log.debug('Opening database ' + filename)
try: shelve.verify(filename) except: os.rename(filename, filename+'.error') log.msg(filename+' could not be opened, moved to '+filename+'.error','db', 1) log.msg('Recreating '+ filename,'db', 1)
try: log.debug('Verifying database: ' + filename) shelve.verify(filename) except: os.rename(filename, filename+'.error') log.msg(filename+' could not be opened, moved to '+filename+'.error','db', 1) log.msg('Recreating '+ filename,'db', 1)
def open_shelve(filename): from bsddb3 import db,dbshelve log.debug('Opening database ' + filename)
shelve = dbshelve.open(filename)
log.debug('Opening database ' + filename) shelve = dbshelve.open(filename)
def open_shelve(filename): from bsddb3 import db,dbshelve log.debug('Opening database ' + filename)
log.debug('Upgrading from previous database format: %s' % filename + '.previous') from bsddb import dbshelve as old_dbshelve
log.msg('Upgrading from previous database format: %s' % filename + '.previous') import bsddb.dbshelve
def open_shelve(filename): from bsddb3 import db,dbshelve log.debug('Opening database ' + filename)
previous_shelve = old_dbshelve.open(filename + '.previous')
previous_shelve = bsddb.dbshelve.open(filename + '.previous')
def open_shelve(filename): from bsddb3 import db,dbshelve log.debug('Opening database ' + filename)
log.debug("abort - not implemented")
log.debug("abort - method not implemented")
def process(self): """ Each new request begins processing here """ log.debug("Request: " + self.method + " " + self.uri); # Clean up URL self.uri = self.simplify_path(self.uri)
from bsddb3 import db,dbshelve,DBInvalidArgError
from bsddb3 import db,dbshelve
def open_shelve(filename): from bsddb3 import db,dbshelve,DBInvalidArgError log.debug('Opening database ' + filename)
except DBInvalidArgError:
except db.DBInvalidArgError:
def open_shelve(filename): from bsddb3 import db,dbshelve,DBInvalidArgError log.debug('Opening database ' + filename)
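The open_shelve variants above follow one recovery ladder: open the database, move it aside and recreate it when it cannot be read, and upgrade from the older bsddb on-disk format when bsddb3 refuses it with db.DBInvalidArgError. A rough sketch of the first two rungs, using the standard-library shelve module purely so the example runs without bsddb3 (an assumption, not the project's actual backend):

import os, shelve

def open_db(filename):
    try:
        return shelve.open(filename)
    except Exception:
        if os.path.exists(filename):
            os.rename(filename, filename + '.error')   # keep the unreadable file for inspection
        return shelve.open(filename)                    # fresh, empty database

db = open_db('/tmp/apt-proxy-update')                   # hypothetical path
db['last'] = 'ok'
db.close()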
del self.factory.runningClients[self.request.uri]
try: del self.factory.runningClients[self.request.uri] except exceptions.KeyError: self.factory.debug("We are not on runningClients!!!") self.factory.debug(str(self.factory.runningClients)) raise exceptions.KeyError
def aptEnd(self): """ Called by subclasses when aptDataEnd does too much things.
self.process = reactor.spawnProcess(self, exe, args)
self.nullhandle = open("/dev/null", "w") self.process = reactor.spawnProcess(self, exe, args, childFDs = { 0:"w", 1:self.nullhandle.fileno(), 2:"r" })
def __init__(self, request): self.factory = request.factory self.deferred = defer.Deferred() # Deferred that passes status back self.path = request.local_file
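The childFDs change above silences the child's stdout by pointing fd 1 at /dev/null while keeping stderr readable. A minimal Twisted sketch of the same trick, with a throwaway /bin/sh command standing in for the real child process:

from twisted.internet import protocol, reactor

class QuietChild(protocol.ProcessProtocol):
    def errReceived(self, data):
        print("child stderr:", data)        # fd 2 is mapped "r", so stderr still arrives here
    def processEnded(self, reason):
        reactor.stop()

nullhandle = open("/dev/null", "w")
reactor.spawnProcess(QuietChild(), "/bin/sh",
                     ["/bin/sh", "-c", "echo swallowed; echo visible >&2"],
                     childFDs={0: "w", 1: nullhandle.fileno(), 2: "r"})
reactor.run()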
log.debug("Last request removed",'client')
log.debug("Last request removed",'Fetcher')
def remove_request(self, request): """ Request should NOT be served through this Fetcher, the client probably closed the connection. If this is our last request, we may also close the connection with the server depending on the configuration.
"telling the transport to loseConnection",'client')
"telling the transport to loseConnection",'Fetcher')
def remove_request(self, request): """ Request should NOT be served through this Fetcher, the client probably closed the connection. If this is our last request, we may also close the connection with the server depending on the configuration.
log.debug(str(request.backend) + request.uri, 'fetcher_activate')
log.debug(str(request.backend) + request.uri, 'Fetcher.activate')
def activate(self, request): log.debug(str(request.backend) + request.uri, 'fetcher_activate') self.local_file = request.local_file self.local_mtime = request.local_mtime self.factory = request.factory self.request = request request.content.read()
log.debug("Finished receiving data, status:%d saveData:%d" %(self.status_code, saveData));
log.debug("Finished receiving data, status:%d saveData:%d" %(self.status_code, saveData), 'Fetcher');
def apDataEnd(self, data, saveData=True): """ Called by subclasses when the data transfer is over.
log.debug("no local time: "+self.local_file,'client')
log.debug("no local time: "+self.local_file,'Fetcher')
def apDataEnd(self, data, saveData=True): """ Called by subclasses when the data transfer is over.
"connection already closed")
"connection already closed", 'Fetcher')
def apDataEnd(self, data, saveData=True): """ Called by subclasses when the data transfer is over.
log.debug("We are not on runningFetchers!!!",'client')
log.debug("We are not on runningFetchers!!!",'Fetcher')
def apEnd(self): """ Called by subclasses when apDataEnd does too many things.
'client')
'Fetcher')
def apEnd(self): """ Called by subclasses when apDataEnd does too many things.
log.debug(' URI:' + self.request.uri, 'fetcher_activate')
log.debug(' URI:' + self.request.uri, 'Fetcher')
def apEnd(self): """ Called by subclasses when apDataEnd does too many things.
+str(self.factory.runningFetchers),'client')
+str(self.factory.runningFetchers),'Fetcher')
def apEnd(self): """ Called by subclasses when apDataEnd does too many things.
log.debug("have active Fetcher",'file_client')
log.debug("have active Fetcher",'Fetcher')
def apEndTransfer(self, fetcher_class): """ Remove this Fetcher and transfer all it's requests to a new instance of 'fetcher_class'. """ #Consider something like this: #req = dummyFetcher.fix_ref_request() #fetcher = fetcher_class() #dummyFetcher.transfer_requests(fetcher) #dummyFetcher.apEnd() #fetcher.activate(req)
log.debug("Connection Failed: "+str(reason))
log.debug("Connection Failed: "+str(reason), 'Fetcher')
def connectionFailed(self, reason=None): """ Tell our requests that the connection with the server failed. """ msg = '[%s] Connection Failed: %s/%s'%( self.request.backend.base, self.request.backendServer.path, self.request.backend_uri)
'client','9')
'Fetcher','9')
def connectionFailed(self, reason=None): """ Tell our requests that the connection with the server failed. """ msg = '[%s] Connection Failed: %s/%s'%( self.request.backend.base, self.request.backendServer.path, self.request.backend_uri)
request.transport.loseConnection
request.finish()
def start_transfer(self, request): self.if_modified(request) if len(self.requests) == 0: #we had a single request and didn't have to send it self.apEnd() return
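The removed line named the bound method but never called it (no parentheses), so the transport was never actually closed; the replacement calls request.finish() instead. A two-line reminder of the difference:

class Transport:
    def loseConnection(self):
        print("connection closed")

t = Transport()
t.loseConnection       # evaluates to a bound method object; nothing happens
t.loseConnection()     # actually closes the connection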
def open_shelve(filename):
def open_shelve(dbname):
def open_shelve(filename): from bsddb3 import db,dbshelve
db_dir = self.cache_dir+'/'+status_dir+'/db' if not os.path.exists(db_dir): os.makedirs(db_dir)
def open_shelve(filename): from bsddb3 import db,dbshelve
self.update_times = open_shelve(db_dir+'/update.db')
self.update_times = open_shelve('update')
def open_shelve(filename): from bsddb3 import db,dbshelve
self.access_times = open_shelve(db_dir+'/access.db')
self.access_times = open_shelve('access')
def open_shelve(filename): from bsddb3 import db,dbshelve
self.packages = open_shelve(db_dir+'/packages.db')
self.packages = open_shelve('packages')
def open_shelve(filename): from bsddb3 import db,dbshelve
self.local_mtime = os.stat(self.host_file)[stat.ST_MTIME]
if os.path.exists(self.host_file): self.local_mtime = os.stat(self.host_file)[stat.ST_MTIME]
def host_transfer_done(self): """ Called by our LoopbackRequest when the real Fetcher calls finish() on it.
basic.FileSender().beginFileTransfer(file, request).addCallback(self.apEnd).addCallback(lambda r: file.close())
basic.FileSender().beginFileTransfer(file, request).addCallback(self.apEnd).addCallback(lambda r: file.close()).addCallback(lambda r: request.transport.loseConnection())
def insert_request(self, request): if not request.serve_if_cached: request.finish() return Fetcher.insert_request(self, request) self.if_modified(request) file = open(self.local_file,'rb') fcntl.lockf(file.fileno(), fcntl.LOCK_SH)
basic.FileSender().beginFileTransfer(file, self.request).addCallback(self.apEnd).addCallback(lambda r: file.close())
basic.FileSender().beginFileTransfer(file, self.request).addCallback(self.apEnd).addCallback(lambda r: file.close()).addCallback(lambda r: request.transport.loseConnection())
def activate(self, request): Fetcher.activate(self, request) if not request.apFetcher: return self.factory.file_served(request.uri)
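The change above appends one more callback so the client connection is dropped only after the file has been fully streamed and the file object closed. A small self-contained sketch of that ordering, serving an arbitrary local file over TCP (the port and file path are illustrative):

from twisted.internet import protocol, reactor
from twisted.protocols import basic

class ServeOneFile(protocol.Protocol):
    def connectionMade(self):
        f = open("/etc/hostname", "rb")                            # any readable file will do
        d = basic.FileSender().beginFileTransfer(f, self.transport)
        d.addCallback(lambda _: f.close())                         # 1. release the file
        d.addCallback(lambda _: self.transport.loseConnection())   # 2. then drop the client

factory = protocol.Factory()
factory.protocol = ServeOneFile
reactor.listenTCP(8000, factory)
reactor.run()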
FileType( re.compile(r"/(Packages|Release|Sources|Contents-.*)(\.(gz|bz2))?$"), "text/plain", 1),
FileType(re.compile(r"/(Packages|Release(\.gpg)?|Sources|Contents-.*)" r"(\.(gz|bz2))?$"), "text/plain", 1),
def check (self, name): "Returns true if name is of this filetype" if self.regex.search(name): return 1 else: return 0
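The new pattern also recognises detached Release signatures (Release.gpg); the compressed index variants are unchanged. A quick check of which names the updated expression accepts:

import re

index_re = re.compile(r"/(Packages|Release(\.gpg)?|Sources|Contents-.*)"
                      r"(\.(gz|bz2))?$")

for name in ("/debian/dists/sid/Release",
             "/debian/dists/sid/Release.gpg",                     # newly matched
             "/debian/dists/sid/main/binary-i386/Packages.gz",
             "/debian/pool/main/a/apt/apt_0.5_i386.deb"):         # not an index file
    print(name, bool(index_re.search(name)))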
import bsddb
from bsddb import dbshelve as old_dbshelve
def open_shelve(filename): from bsddb3 import db,dbshelve log.debug('Opening database ' + filename)
previous_shelve = bsddb.dbshelve.open(filename + '.previous')
previous_shelve = old_dbshelve.open(filename + '.previous')
def open_shelve(filename): from bsddb3 import db,dbshelve log.debug('Opening database ' + filename)
def __fake_stdout(self): import tempfile null = tempfile.TemporaryFile() self.real_stdout_fd = os.dup(sys.stdout.fileno()) os.dup2(null.fileno(), sys.stdout.fileno())
def __save_stdout(self): self.real_stdout_fd = os.dup(1) os.close(1)
def __fake_stdout(self): import tempfile null = tempfile.TemporaryFile() self.real_stdout_fd = os.dup(sys.stdout.fileno()) os.dup2(null.fileno(), sys.stdout.fileno())
os.dup2(self.real_stdout_fd, sys.stdout.fileno())
os.dup2(self.real_stdout_fd, 1)
def __restore_stdout(self): os.dup2(self.real_stdout_fd, sys.stdout.fileno()) os.close(self.real_stdout_fd) del self.real_stdout_fd
self.cache = apt_pkg.GetCache()
if log.isEnabled('apt'): self.cache = apt_pkg.GetCache() else: self.__save_stdout() self.cache = apt_pkg.GetCache() self.__restore_stdout()
def load(self): """ Regenerates the fake configuration and load the packages server. """ if not self.loaded: shutil.rmtree(self.status_dir+'/apt/lists/') os.makedirs(self.status_dir+'/apt/lists/partial') sources = open(self.status_dir+'/'+'apt/etc/sources.list', 'w') for file in self.packages.keys(): # we should probably clear old entries from self.packages and # take into account the recorded mtime as optimization fake_uri='http://apt-proxy:'+file source_line='deb '+dirname(fake_uri)+'/ /' listpath=(self.status_dir+'/apt/lists/' +apt_pkg.URItoFileName(fake_uri)) sources.write(source_line+'\n')
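Both variants suppress the chatter apt_pkg.GetCache() writes to stdout by re-plumbing file descriptor 1 around the call: one points fd 1 at a temporary file, the other simply closes it and restores it afterwards. A generic sketch of the dup/dup2 dance as a context manager, independent of apt_pkg:

import os, sys, tempfile
from contextlib import contextmanager

@contextmanager
def quiet_stdout():
    null = tempfile.TemporaryFile()
    saved_fd = os.dup(sys.stdout.fileno())          # remember where fd 1 pointed
    os.dup2(null.fileno(), sys.stdout.fileno())     # fd 1 now writes into the temp file
    try:
        yield
    finally:
        os.dup2(saved_fd, sys.stdout.fileno())      # restore the original fd 1
        os.close(saved_fd)
        null.close()

with quiet_stdout():
    os.system("echo 'this never reaches the terminal'")
print("stdout is back")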
"Tryes to make the packages server quit."
"Tries to make the packages server quit."
def unload(self): "Tryes to make the packages server quit." if self.loaded: del self.cache del self.records self.loaded = 0
laterID = None file = None
def loseConnection(self): "Kill rsync process" if self.transport: if self.transport.pid: log.debug("killing rsync child" + str(self.transport.pid), 'rsync_client') os.kill(self.transport.pid, signal.SIGTERM) #self.transport.loseConnection()
self.file = open(self.local_file,'rb') fcntl.lockf(self.file.fileno(), fcntl.LOCK_SH)
file = open(self.local_file,'rb') fcntl.lockf(file.fileno(), fcntl.LOCK_SH)
def activate(self, request): Fetcher.activate(self, request) if not request.apFetcher: return self.factory.file_served(request.uri)
self.readBuffer() def readBuffer(self): self.laterID = None data = self.file.read(abstract.FileDescriptor.bufferSize) self.apDataReceived(data) if self.file.tell() == self.size: self.apDataReceived("") for req in self.requests: req.finish() self.apEnd() else: self.laterID = reactor.callLater(0, self.readBuffer) def apEnd(self): if self.laterID: self.laterID.cancel() if self.file: self.file.close() Fetcher.apEnd(self)
basic.FileSender().beginFileTransfer(file, self.request).addCallback(self.apEnd).addCallback(lambda r: file.close()) def apEnd(self, ignored=None): if len(self.requests) == 0: Fetcher.apEnd(self)
def activate(self, request): Fetcher.activate(self, request) if not request.apFetcher: return self.factory.file_served(request.uri)
'serve_cached': this is somewhat of a hack only usefull for
'serve_cached': this is somewhat of a hack only useful for
def fetch(self, serve_cached=1): """ Serve 'self' from cache or through the appropriate Fetcher depending on the asociated backend. Use post_convert and gzip_convert regular expresions of the Fetcher to gzip/gunzip file before and after download. 'serve_cached': this is somewhat of a hack only usefull for LoopbackRequests (See LoopbackRequest class for more information). """ def fetch_real(result, dummyFetcher, cached, running): """ This is called after verifying if the file is properly cached. If 'cached' the requested file is properly cached. If not 'cached' the requested file was not there, didn't pass the integrity check or may be outdated. """ __pychecker__ = 'unusednames=result' if len(dummyFetcher.requests)==0: #The request's are gone, the clients probably closed the #conection log.debug("THE REQUESTS ARE GONE (Clients closed conection)", 'fetch') dummyFetcher.apEnd() return
"If he wanted to know, tell dady that we are served."
"If he wanted to know, tell daddy that we are served."
def finish(self): "If he wanted to know, tell dady that we are served." if self.finish_cb: self.finish_cb() self.transport = None pass
Each incomming request is passed to a new Request instance.
Each incoming request is passed to a new Request instance.
def finish(self): "If he wanted to know, tell dady that we are served." if self.finish_cb: self.finish_cb() self.transport = None pass
"If the connection is lost, notify all my requets"
"If the connection is lost, notify all my requests"
def connectionLost(self, reason=None): "If the connection is lost, notify all my requets" __pychecker__ = 'unusednames=reason' for req in self.requests: req.connectionLost() log.debug("Client connection closed") if log.isEnabled('memleak'): memleak.print_top_10()
if re.search('/../', self.uri):
if re.search('/\.\./', self.uri):
def process(self): """ Each new request begins processing here """ log.debug("Request: " + self.method + " " + self.uri); # Clean up URL self.uri = self.simplify_path(self.uri)
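Without the backslashes the dots are regex wildcards, so the old test also flagged harmless two-character path components; the escaped form only trips on a literal /../ segment. A short demonstration:

import re

for uri in ("/debian/dists/../../../etc/passwd",    # real traversal attempt
            "/mirror/de/dists/sid/Release"):         # harmless two-letter directory
    print(uri,
          "old:", bool(re.search(r'/../', uri)),
          "new:", bool(re.search(r'/\.\./', uri)))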
if (self.__class__ != FetcherFile or req.serve_if_cached):
if (fetcher_class != FetcherFile or req.serve_if_cached):
def apEndTransfer(self, fetcher_class): """ Remove this Fetcher and transfer all it's requests to a new instance of 'fetcher_class'. """ #Consider something like this: #req = dummyFetcher.fix_ref_request() #fetcher = fetcher_class() #dummyFetcher.transfer_requests(fetcher) #dummyFetcher.apEnd() #fetcher.activate(req)
def activate(self, request):
def activate(self, request, postconverting=0):
def activate(self, request): Fetcher.activate(self, request) if not request.apFetcher: return
loop = LoopbackRequest(request, self.host_transfer_done) loop.uri = host_uri loop.local_file = self.host_file loop.process() self.loop_req = loop loop.fetch(serve_cached=0)
running = self.factory.runningFetchers if not postconverting or running.has_key(self.host_file): loop = LoopbackRequest(request, self.host_transfer_done) loop.uri = host_uri loop.local_file = self.host_file loop.process() self.loop_req = loop loop.serve_if_cached=0 if running.has_key(self.host_file): running[self.host_file].insert_request(loop) else: loop.fetch(serve_cached=0) else: self.loop_req = None self.host_transfer_done()
def activate(self, request): Fetcher.activate(self, request) if not request.apFetcher: return
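The added branch avoids starting a second download when a fetcher for the same host_file is already in runningFetchers; the new loopback request simply joins the transfer in flight. A language-level sketch of that coalescing idea (the names running and Download are illustrative, not the project's classes):

class Download:
    def __init__(self, path):
        self.path = path
        self.requests = []              # everyone waiting on this file
    def insert_request(self, req):
        self.requests.append(req)

running = {}                            # path -> Download currently in flight

def fetch(path, req):
    if path in running:                 # join the existing transfer
        running[path].insert_request(req)
    else:                               # first client: start a new one
        d = Download(path)
        d.insert_request(req)
        running[path] = d
    return running[path]

# two "clients" asking for the same index end up on one Download
a = fetch('/debian/dists/sid/main/binary-i386/Packages', 'client-1')
b = fetch('/debian/dists/sid/main/binary-i386/Packages', 'client-2')
print(a is b, a.requests)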
if self.loop_req.code != http.OK:
if self.loop_req and self.loop_req.code != http.OK:
def host_transfer_done(self): """ Called by our LoopbackRequest when the real Fetcher calls finish() on it.
post_convert = re.compile(r"^Should not match anything$") gzip_convert = post_convert
post_convert = re.compile(r"/Packages.gz$") gzip_convert = re.compile(r"^Should not match anything$")
def processEnded(self, reason=None): log.debug("Status: %d" %(self.process.status),'rsync_client') if self.process.status != 0: self.setResponseCode(http.NOT_FOUND) if not os.path.exists(self.local_file): try: os.removedirs(self.local_dir) except: pass
print "CONVERT:", fetcher.post_convert
def fetch_real(result, dummyFetcher, cached, running): """ This is called after verifying if the file is properly cached. If 'cached' the requested file is properly cached. If not 'cached' the requested file was not there, didn't pass the integrity check or may be outdated. """ if len(dummyFetcher.requests)==0: #The request's are gone, the clients probably closed the #conection log.debug("THE REQUESTS ARE GONE (Clients closed conection)") dummyFetcher.apEnd() return
FetcherGzip(loop)
gzip = FetcherGzip() gzip.activate(loop, postconverting=1)
def fetch_real(result, dummyFetcher, cached, running): """ This is called after verifying if the file is properly cached. If 'cached' the requested file is properly cached. If not 'cached' the requested file was not there, didn't pass the integrity check or may be outdated. """ if len(dummyFetcher.requests)==0: #The request's are gone, the clients probably closed the #conection log.debug("THE REQUESTS ARE GONE (Clients closed conection)") dummyFetcher.apEnd() return
log.debug("have active fetcher",'client')
log.debug("have active fetcher: "+self.uri,'client')
def fetch_real(result, dummyFetcher, cached, running): """ This is called after verifying if the file is properly cached. If 'cached' the requested file is properly cached. If not 'cached' the requested file was not there, didn't pass the integrity check or may be outdated. """ if len(dummyFetcher.requests)==0: #The request's are gone, the clients probably closed the #conection log.debug("THE REQUESTS ARE GONE (Clients closed conection)") dummyFetcher.apEnd() return
creator = ClientCreator(reactor, ftp.FTPClient, passive=0)
if not request.backend.username: creator = ClientCreator(reactor, ftp.FTPClient, passive=0) else: creator = ClientCreator(reactor, ftp.FTPClient, request.backend.username, request.backend.password, passive=0)
def activate (self, request): Fetcher.activate(self, request) if not request.apFetcher: return
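The new branch passes backend credentials through to the FTP client when they are configured and otherwise keeps the anonymous defaults. A hedged sketch of the same conditional wiring; the host, username and password here are placeholders:

from twisted.internet import reactor
from twisted.internet.protocol import ClientCreator
from twisted.protocols import ftp

username, password = None, None         # filled in from the backend URI when present

if not username:
    creator = ClientCreator(reactor, ftp.FTPClient, passive=0)
else:
    creator = ClientCreator(reactor, ftp.FTPClient, username, password, passive=0)

d = creator.connectTCP("ftp.example.org", 21)                 # fires with a connected FTPClient
d.addCallback(lambda proto: proto.transport.loseConnection())
d.addBoth(lambda _: reactor.stop())
reactor.run()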
for k previous_shelve.keys():
for k in previous_shelve.keys():
def open_shelve(filename): from bsddb3 import db,dbshelve,DBInvalidArgError log.debug('Opening database ' + filename)
moderated = self.getValueFor('moderated')
moderated = group.getValueFor('moderated')
def send_email(self, REQUEST, RESPONSE, group_id, email_address, email_id, message, subject=''): """ Send an email to the group. """ list_manager = self.get_xwfMailingListManager() sec = getSecurityManager() user = sec.getUser() if email_address not in user.get_emailAddresses(): raise 'Forbidden', 'Only the authenticated owner of an email address may use it to post' group = getattr(list_manager, group_id)
subject = 'Re: %s' % orig_email.getProperty('subject')
subject = 'Re: %s' % orig_email.getProperty('mailSubject')
def send_email(self, REQUEST, RESPONSE, group_id, email_address, email_id, message, subject=''): """ Send an email to the group. We send the email via the mail server so it will handle things like message ID's for us. """ list_manager = self.get_xwfMailingListManager() sec = getSecurityManager() user = sec.getUser() if email_address not in user.get_emailAddresses(): raise 'Forbidden', 'Only the authenticated owner of an email address may use it to post' group = getattr(list_manager, group_id) group_email = group.getProperty('mailto') group_name = group.getProperty('title') message_id = None if email_id: orig_email = self.get_email(email_id) subject = 'Re: %s' % orig_email.getProperty('subject') message_id = orig_email.getProperty('message-id', '') name = '%s %s' % (user.preferredName, user.lastName) headers = """From: %s <%s>
class GSBaseMessageView(Products.Five.BrowserView):
class GSNewTopicView(Products.Five.BrowserView, GSGroupObject):
def get_group_info(self): assert self.__groupInfo retval = self.__groupInfo assert retval return retval
self.set_archive(self.context.messages) self.set_emailId(self.context.REQUEST.form.get('id', None)) self.init_email() self.init_topic() assert self.archive assert self.emailId assert self.email assert self.topic def set_archive(self, archive): """Set the email message archive to "archive".""" assert archive self.archive = archive assert self.archive def get_archive(self): """Get the email message archive.""" assert self.archive return self.archive def set_emailId(self, emailId): assert emailId self.emailId = emailId assert self.emailId def get_emailId(self): return self.emailId def init_email(self): assert self.emailId self.email = None self.email = self.archive.get_email(self.emailId) assert self.email def get_email(self): retval = self.email assert retval return retval def init_topic(self): assert self.emailId assert self.archive assert self.email query = {'compressedTopic': '%s' % self.email.compressedSubject} result = self.archive.find_email(query) assert result self.topic = map(lambda x: x.getObject(), result) assert self.topic assert self.topic.append assert len(self.topic) > 0 def get_topic(self): assert self.topic assert self.topic.append return self.topic def get_topic_name(self): assert self.email retval = self.email['mailSubject'] return retval def process_form(self): pass class GSTopicView(GSBaseMessageView, GSGroupObject): """View of a GroupServer Topic""" def __init__(self, context, request): assert context assert request GSBaseMessageView.__init__(self, context, request)
def __init__(self, context, request): # Preconditions assert context assert request Products.Five.BrowserView.__init__(self, context, request) self.set_archive(self.context.messages) self.set_emailId(self.context.REQUEST.form.get('id', None)) self.init_email() self.init_topic() # Postconditions assert self.archive assert self.emailId assert self.email assert self.topic
self.init_threads() def init_threads(self): assert self.topic assert self.archive self.threads = self.archive.get_all_threads({}, 'mailDate', 'asc') self.threadNames = map(lambda thread: thread[1][0]['mailSubject'], self.threads) currThreadName = self.get_topic_name() assert currThreadName in self.threadNames self.currThreadIndex = self.threadNames.index(currThreadName) def post_date_storter(self, a, b): if a['mailDate'] > b['mailDate']: retval = 1 elif a['mailDate'] == b['mailDate']: retval = 0 else: retval = -1 assert retval in (1, 0, -1) return retval def init_topic(self): GSBaseMessageView.init_topic(self) self.topic.sort(self.post_date_storter) def get_next_topic(self): assert self.threads retval = None nextThreadIndex = self.currThreadIndex - 1 if nextThreadIndex >= 0: ntID = self.threads[nextThreadIndex][1][0]['id'] ntName = self.threads[nextThreadIndex][1][0]['mailSubject'] retval = (ntID, ntName) else: retval = (None, None) assert len(retval) == 2 return retval def get_previous_topic(self): assert self.threads retval = None previousThreadIndex = self.currThreadIndex + 1 if previousThreadIndex < len(self.threads): ptID = self.threads[previousThreadIndex][1][0]['id'] ptName = self.threads[previousThreadIndex][1][0]['mailSubject'] retval = (ptID, ptName) else: retval = (None, None) assert len(retval) == 2 return retval
def __init__(self, context, request): # Preconditions assert context assert request GSBaseMessageView.__init__(self, context, request) GSGroupObject.__init__(self, context) self.init_threads()
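post_date_storter above is a cmp-style comparator used to order a topic's posts by mailDate; with a key function the same ordering needs no comparator at all. A tiny illustration on made-up posts:

posts = [{'id': 'b', 'mailDate': '2004-06-02 10:15'},
         {'id': 'a', 'mailDate': '2004-06-01 09:00'},
         {'id': 'c', 'mailDate': '2004-06-02 10:15'}]

posts.sort(key=lambda post: post['mailDate'])   # stable: equal dates keep their relative order
print([p['id'] for p in posts])                 # ['a', 'b', 'c']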
pp = '/'.join(indexable.getPhysicalPath())
pp = '/'.join(object.getPhysicalPath())
def reindex_mailObjects(self): """ Reindex the mailObjects that we contain. """ for object in self.archive.objectValues('Folder'): if hasattr(object, 'mailFrom'): pp = '/'.join(indexable.getPhysicalPath()) self.Catalog.uncatalog_object(pp) self.Catalog.catalog_object(indexable, pp) return indexables