| field | value | date |
|---|---|---|
| author | yo mama <pepper@scannerjammer.com> | 2015-01-25 23:46:43 -0800 |
| committer | yo mama <pepper@scannerjammer.com> | 2015-01-25 23:46:43 -0800 |
| commit | e49ddd41e5549c1c6abab8005edd2e8b18ee0e09 (patch) | |
| tree | 017a97ce4f17c9a4422e80f56f4e638c629438d4 | |
working copy...first push
355 files changed, 33797 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..d8dd753 --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +*.wav diff --git a/.lock-waf_linux2_build b/.lock-waf_linux2_build new file mode 100644 index 0000000..c089c9c --- /dev/null +++ b/.lock-waf_linux2_build @@ -0,0 +1,8 @@ +argv = ['./waf', 'configure'] +environ = {'WINDOWID': '20971526', 'INFINALITY_FT_FRINGE_FILTER_STRENGTH': '50', 'INFINALITY_FT_USE_VARIOUS_TWEAKS': 'true', 'LOGNAME': 'pepper', 'USER': 'pepper', 'PATH': '/home/pepper/.gem/ruby/2.1.0/bin:/home/pepper/.gem/ruby/2.1.0/bin:/usr/local/bin:/usr/bin:/bin:/usr/local/sbin:/usr/sbin:/sbin:/usr/firstscripts:/root/.gem/ruby/2.1.0/bin:/opt/android-sdk/build-tools/19.0.2/:/opt/android-sdk/platform-tools:/opt/android-sdk/tools:/opt/devkitpro/devkitPPC/bin:/opt/marytts/bin:/usr/bin/site_perl:/usr/bin/vendor_perl:/usr/bin/core_perl:/opt/android-sdk/tools/:/opt/android-sdk/platform-tools/:/opt/android-sdk/tools/:/opt/android-sdk/platform-tools/', 'XDG_VTNR': '1', 'HOME': '/home/pepper', 'paul': '6172753373', 'HG': '/usr/bin/hg', 'LANG': 'en_US.UTF-8', 'TERM': 'xterm-256color', 'SHELL': '/bin/bash', 'XAUTHORITY': '/home/pepper/.Xauthority', 'SHLVL': '3', 'DBUS_SESSION_BUS_ADDRESS': 'unix:abstract=/tmp/dbus-AtdPifsQ9w,guid=1c8a4fb2d96ee041948d9c9554c5d823', 'WINDOWPATH': '1', 'EDITOR': 'vim', 'JAVA_HOME': '/usr/lib/jvm/java-7-openjdk', 'ANDROID_SWT': '/usr/share/java', 'dad': '6174598251', 'XDG_RUNTIME_DIR': '/run/user/1002', 'VTE_VERSION': '3603', 'INFINALITY_FT_FILTER_PARAMS': '06 22 36 22 06', 'ANDROID_HOME': '/opt/android-sdk', 'XDG_SESSION_ID': 'c1', 'DEVKITPRO': '/opt/devkitpro', '_': './waf', 'MOZ_PLUGIN_PATH': '/usr/lib/mozilla/plugins', 'GREP_OPTIONS': '--color=auto', 'DISPLAY': ':0', 'GTK_MODULES': 'canberra-gtk-module', 'INFINALITY_FT_CHROMEOS_STYLE_SHARPENING_STRENGTH': '20', 'OLDPWD': '/home/pepper/LV2_render/src', 'ANT_HOME': '/usr/share/apache-ant', 'DEVKITPPC': '/opt/devkitpro/devkitPPC', 'PWD': '/home/pepper/LV2_render', 'calla': '3109381479', 'COLORTERM': 'gnome-terminal', 'MAIL': '/var/spool/mail/pepper', 'XDG_SEAT': 'seat0'} +files = ['/home/pepper/LV2_render/wscript'] +hash = '~\x8a\x88\x18\xd5\xff\\\xc7\x1e+Y\xf0\xa2hK\xb4' +options = {'docdir': None, 'ultra_strict': False, 'force': False, 'verbose': 0, 'pardebug': False, 'destdir': '', 'no_jack_session': False, 'zones': '', 'prefix': '/usr/local/', 'download': False, 'grind': False, 'targets': '', 'configdir': None, 'mandir': None, 'nocache': False, 'progress_bar': 0, 'top': '', 'libdir': None, 'strict': False, 'datadir': None, 'lv2_user': False, 'out': '', 'check_cxx_compiler': 'g++ icpc', 'bindir': None, 'files': '', 'lv2_system': False, 'jobs': 4, 'docs': False, 'distcheck_args': None, 'no_qt': False, 'lv2dir': None, 'keep': 0, 'includedir': None, 'debug': False, 'check_c_compiler': 'gcc icc'} +out_dir = '/home/pepper/LV2_render/build' +run_dir = '/home/pepper/LV2_render' +top_dir = '/home/pepper/LV2_render' diff --git a/.lock-waf_linux_build b/.lock-waf_linux_build new file mode 100644 index 0000000..d07cd62 --- /dev/null +++ b/.lock-waf_linux_build @@ -0,0 +1,8 @@ +argv = ['./waf', 'configure'] +environ = {'TERM': 'xterm-256color', 'HOME': '/home/pepper', 'HISTFILESIZE': '100000', 'SSH_CONNECTION': '104.34.76.116 45733 199.180.249.102 22', 'PATH': '/usr/local/bin:/usr/bin:/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/marytts/bin:/usr/bin/site_perl:/usr/bin/vendor_perl:/usr/bin/core_perl', 'XDG_RUNTIME_DIR': '/run/user/1000', 'SSH_CLIENT': '104.34.76.116 45733 22', 'SHELL': '/bin/bash', 'DINO': 
'/var/www/dino.fm', 'LOGNAME': 'pepper', 'LS_COLORS': 'rs=0:di=01;34:ln=01;36:mh=00:pi=40;33:so=01;35:do=01;35:bd=40;33;01:cd=40;33;01:or=40;31;01:su=37;41:sg=30;43:ca=30;41:tw=30;42:ow=34;42:st=37;44:ex=01;32:*.tar=01;31:*.tgz=01;31:*.arc=01;31:*.arj=01;31:*.taz=01;31:*.lha=01;31:*.lz4=01;31:*.lzh=01;31:*.lzma=01;31:*.tlz=01;31:*.txz=01;31:*.tzo=01;31:*.t7z=01;31:*.zip=01;31:*.z=01;31:*.Z=01;31:*.dz=01;31:*.gz=01;31:*.lrz=01;31:*.lz=01;31:*.lzo=01;31:*.xz=01;31:*.bz2=01;31:*.bz=01;31:*.tbz=01;31:*.tbz2=01;31:*.tz=01;31:*.deb=01;31:*.rpm=01;31:*.jar=01;31:*.war=01;31:*.ear=01;31:*.sar=01;31:*.rar=01;31:*.alz=01;31:*.ace=01;31:*.zoo=01;31:*.cpio=01;31:*.7z=01;31:*.rz=01;31:*.cab=01;31:*.jpg=01;35:*.jpeg=01;35:*.gif=01;35:*.bmp=01;35:*.pbm=01;35:*.pgm=01;35:*.ppm=01;35:*.tga=01;35:*.xbm=01;35:*.xpm=01;35:*.tif=01;35:*.tiff=01;35:*.png=01;35:*.svg=01;35:*.svgz=01;35:*.mng=01;35:*.pcx=01;35:*.mov=01;35:*.mpg=01;35:*.mpeg=01;35:*.m2v=01;35:*.mkv=01;35:*.webm=01;35:*.ogm=01;35:*.mp4=01;35:*.m4v=01;35:*.mp4v=01;35:*.vob=01;35:*.qt=01;35:*.nuv=01;35:*.wmv=01;35:*.asf=01;35:*.rm=01;35:*.rmvb=01;35:*.flc=01;35:*.avi=01;35:*.fli=01;35:*.flv=01;35:*.gl=01;35:*.dl=01;35:*.xcf=01;35:*.xwd=01;35:*.yuv=01;35:*.cgm=01;35:*.emf=01;35:*.axv=01;35:*.anx=01;35:*.ogv=01;35:*.ogx=01;35:*.aac=00;36:*.au=00;36:*.flac=00;36:*.mid=00;36:*.midi=00;36:*.mka=00;36:*.mp3=00;36:*.mpc=00;36:*.ogg=00;36:*.ra=00;36:*.wav=00;36:*.axa=00;36:*.oga=00;36:*.spx=00;36:*.xspf=00;36:', 'NODE_PATH': '/usr/lib/node_modules', 'PAGER': '/usr/bin/less', 'HISTSIZE': '100000', 'PWD': '/home/pepper/LV2/jalv-1.4.6', 'USER': 'pepper', 'EDITOR': '/usr/bin/vim', '_': './waf', 'SSH_TTY': '/dev/pts/5', 'MAIL': '/var/spool/mail/pepper', 'LANG': 'en_US', 'OLDPWD': '/home/pepper/LV2/jalv-1.4.6/src', 'SHLVL': '1', 'XDG_SESSION_ID': 'c1180', 'HISTCONTROL': 'ignoredups:erasedups', 'JAVA_HOME': '/usr/lib/jvm/java-7-openjdk'} +files = ['/home/pepper/LV2/jalv-1.4.6/wscript'] +hash = b'\xd5\xec~D\xdcJ\x8e\xc0qt\xde\x80$(^B' +options = {'keep': 0, 'force': False, 'check_cxx_compiler': 'g++ icpc', 'destdir': '', 'prefix': '/usr/local/', 'configdir': None, 'check_c_compiler': 'gcc icc', 'download': False, 'top': '', 'targets': '', 'verbose': 0, 'ultra_strict': False, 'jobs': 4, 'nocache': False, 'libdir': None, 'docs': False, 'datadir': None, 'distcheck_args': None, 'strict': False, 'no_qt': False, 'lv2_user': False, 'docdir': None, 'zones': '', 'grind': False, 'mandir': None, 'pardebug': False, 'no_jack_session': False, 'includedir': None, 'bindir': None, 'debug': False, 'lv2dir': None, 'out': '', 'files': '', 'lv2_system': False, 'progress_bar': 0} +out_dir = '/home/pepper/LV2/jalv-1.4.6/build' +run_dir = '/home/pepper/LV2/jalv-1.4.6' +top_dir = '/home/pepper/LV2/jalv-1.4.6' diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Build.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Build.py new file mode 100644 index 0000000..f76933a --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Build.py @@ -0,0 +1,758 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys,errno,re,shutil +try: + import cPickle +except ImportError: + import pickle as cPickle +from waflib import Runner,TaskGen,Utils,ConfigSet,Task,Logs,Options,Context,Errors +import waflib.Node +CACHE_DIR='c4che' +CACHE_SUFFIX='_cache.py' +INSTALL=1337 +UNINSTALL=-1337 +SAVED_ATTRS='root node_deps raw_deps task_sigs'.split() +CFG_FILES='cfg_files' +POST_AT_ONCE=0 +POST_LAZY=1 +POST_BOTH=2 +class BuildContext(Context.Context): + '''executes the build''' + cmd='build' + variant='' + def __init__(self,**kw): + super(BuildContext,self).__init__(**kw) + self.is_install=0 + self.top_dir=kw.get('top_dir',Context.top_dir) + self.run_dir=kw.get('run_dir',Context.run_dir) + self.post_mode=POST_AT_ONCE + self.out_dir=kw.get('out_dir',Context.out_dir) + self.cache_dir=kw.get('cache_dir',None) + if not self.cache_dir: + self.cache_dir=self.out_dir+os.sep+CACHE_DIR + self.all_envs={} + self.task_sigs={} + self.node_deps={} + self.raw_deps={} + self.cache_dir_contents={} + self.task_gen_cache_names={} + self.launch_dir=Context.launch_dir + self.jobs=Options.options.jobs + self.targets=Options.options.targets + self.keep=Options.options.keep + self.cache_global=Options.cache_global + self.nocache=Options.options.nocache + self.progress_bar=Options.options.progress_bar + self.deps_man=Utils.defaultdict(list) + self.current_group=0 + self.groups=[] + self.group_names={} + def get_variant_dir(self): + if not self.variant: + return self.out_dir + return os.path.join(self.out_dir,self.variant) + variant_dir=property(get_variant_dir,None) + def __call__(self,*k,**kw): + kw['bld']=self + ret=TaskGen.task_gen(*k,**kw) + self.task_gen_cache_names={} + self.add_to_group(ret,group=kw.get('group',None)) + return ret + def rule(self,*k,**kw): + def f(rule): + ret=self(*k,**kw) + ret.rule=rule + return ret + return f + def __copy__(self): + raise Errors.WafError('build contexts are not supposed to be copied') + def install_files(self,*k,**kw): + pass + def install_as(self,*k,**kw): + pass + def symlink_as(self,*k,**kw): + pass + def load_envs(self): + node=self.root.find_node(self.cache_dir) + if not node: + raise Errors.WafError('The project was not configured: run "waf configure" first!') + lst=node.ant_glob('**/*%s'%CACHE_SUFFIX,quiet=True) + if not lst: + raise Errors.WafError('The cache directory is empty: reconfigure the project') + for x in lst: + name=x.path_from(node).replace(CACHE_SUFFIX,'').replace('\\','/') + env=ConfigSet.ConfigSet(x.abspath()) + self.all_envs[name]=env + for f in env[CFG_FILES]: + newnode=self.root.find_resource(f) + try: + h=Utils.h_file(newnode.abspath()) + except(IOError,AttributeError): + Logs.error('cannot find %r'%f) + h=Utils.SIG_NIL + newnode.sig=h + def init_dirs(self): + if not(os.path.isabs(self.top_dir)and os.path.isabs(self.out_dir)): + raise Errors.WafError('The project was not configured: run "waf configure" first!') + self.path=self.srcnode=self.root.find_dir(self.top_dir) + self.bldnode=self.root.make_node(self.variant_dir) + self.bldnode.mkdir() + def execute(self): + self.restore() + if not self.all_envs: + self.load_envs() + self.execute_build() + def execute_build(self): + Logs.info("Waf: Entering directory `%s'"%self.variant_dir) + self.recurse([self.run_dir]) + self.pre_build() + self.timer=Utils.Timer() + if self.progress_bar: + sys.stderr.write(Logs.colors.cursor_off) + try: + self.compile() + finally: + if self.progress_bar==1: + c=len(self.returned_tasks)or 1 + 
self.to_log(self.progress_line(c,c,Logs.colors.BLUE,Logs.colors.NORMAL)) + print('') + sys.stdout.flush() + sys.stderr.write(Logs.colors.cursor_on) + Logs.info("Waf: Leaving directory `%s'"%self.variant_dir) + self.post_build() + def restore(self): + try: + env=ConfigSet.ConfigSet(os.path.join(self.cache_dir,'build.config.py')) + except(IOError,OSError): + pass + else: + if env['version']<Context.HEXVERSION: + raise Errors.WafError('Version mismatch! reconfigure the project') + for t in env['tools']: + self.setup(**t) + dbfn=os.path.join(self.variant_dir,Context.DBFILE) + try: + data=Utils.readf(dbfn,'rb') + except(IOError,EOFError): + Logs.debug('build: Could not load the build cache %s (missing)'%dbfn) + else: + try: + waflib.Node.pickle_lock.acquire() + waflib.Node.Nod3=self.node_class + try: + data=cPickle.loads(data) + except Exception ,e: + Logs.debug('build: Could not pickle the build cache %s: %r'%(dbfn,e)) + else: + for x in SAVED_ATTRS: + setattr(self,x,data[x]) + finally: + waflib.Node.pickle_lock.release() + self.init_dirs() + def store(self): + data={} + for x in SAVED_ATTRS: + data[x]=getattr(self,x) + db=os.path.join(self.variant_dir,Context.DBFILE) + try: + waflib.Node.pickle_lock.acquire() + waflib.Node.Nod3=self.node_class + x=cPickle.dumps(data,-1) + finally: + waflib.Node.pickle_lock.release() + Utils.writef(db+'.tmp',x,m='wb') + try: + st=os.stat(db) + os.remove(db) + if not Utils.is_win32: + os.chown(db+'.tmp',st.st_uid,st.st_gid) + except(AttributeError,OSError): + pass + os.rename(db+'.tmp',db) + def compile(self): + Logs.debug('build: compile()') + self.producer=Runner.Parallel(self,self.jobs) + self.producer.biter=self.get_build_iterator() + self.returned_tasks=[] + try: + self.producer.start() + except KeyboardInterrupt: + self.store() + raise + else: + if self.producer.dirty: + self.store() + if self.producer.error: + raise Errors.BuildError(self.producer.error) + def setup(self,tool,tooldir=None,funs=None): + if isinstance(tool,list): + for i in tool:self.setup(i,tooldir) + return + module=Context.load_tool(tool,tooldir) + if hasattr(module,"setup"):module.setup(self) + def get_env(self): + try: + return self.all_envs[self.variant] + except KeyError: + return self.all_envs[''] + def set_env(self,val): + self.all_envs[self.variant]=val + env=property(get_env,set_env) + def add_manual_dependency(self,path,value): + if path is None: + raise ValueError('Invalid input') + if isinstance(path,waflib.Node.Node): + node=path + elif os.path.isabs(path): + node=self.root.find_resource(path) + else: + node=self.path.find_resource(path) + if isinstance(value,list): + self.deps_man[id(node)].extend(value) + else: + self.deps_man[id(node)].append(value) + def launch_node(self): + try: + return self.p_ln + except AttributeError: + self.p_ln=self.root.find_dir(self.launch_dir) + return self.p_ln + def hash_env_vars(self,env,vars_lst): + if not env.table: + env=env.parent + if not env: + return Utils.SIG_NIL + idx=str(id(env))+str(vars_lst) + try: + cache=self.cache_env + except AttributeError: + cache=self.cache_env={} + else: + try: + return self.cache_env[idx] + except KeyError: + pass + lst=[env[a]for a in vars_lst] + ret=Utils.h_list(lst) + Logs.debug('envhash: %s %r',Utils.to_hex(ret),lst) + cache[idx]=ret + return ret + def get_tgen_by_name(self,name): + cache=self.task_gen_cache_names + if not cache: + for g in self.groups: + for tg in g: + try: + cache[tg.name]=tg + except AttributeError: + pass + try: + return cache[name] + except KeyError: + raise 
Errors.WafError('Could not find a task generator for the name %r'%name) + def progress_line(self,state,total,col1,col2): + n=len(str(total)) + Utils.rot_idx+=1 + ind=Utils.rot_chr[Utils.rot_idx%4] + pc=(100.*state)/total + eta=str(self.timer) + fs="[%%%dd/%%%dd][%%s%%2d%%%%%%s][%s]["%(n,n,ind) + left=fs%(state,total,col1,pc,col2) + right='][%s%s%s]'%(col1,eta,col2) + cols=Logs.get_term_cols()-len(left)-len(right)+2*len(col1)+2*len(col2) + if cols<7:cols=7 + ratio=((cols*state)//total)-1 + bar=('='*ratio+'>').ljust(cols) + msg=Utils.indicator%(left,bar,right) + return msg + def declare_chain(self,*k,**kw): + return TaskGen.declare_chain(*k,**kw) + def pre_build(self): + for m in getattr(self,'pre_funs',[]): + m(self) + def post_build(self): + for m in getattr(self,'post_funs',[]): + m(self) + def add_pre_fun(self,meth): + try: + self.pre_funs.append(meth) + except AttributeError: + self.pre_funs=[meth] + def add_post_fun(self,meth): + try: + self.post_funs.append(meth) + except AttributeError: + self.post_funs=[meth] + def get_group(self,x): + if not self.groups: + self.add_group() + if x is None: + return self.groups[self.current_group] + if x in self.group_names: + return self.group_names[x] + return self.groups[x] + def add_to_group(self,tgen,group=None): + assert(isinstance(tgen,TaskGen.task_gen)or isinstance(tgen,Task.TaskBase)) + tgen.bld=self + self.get_group(group).append(tgen) + def get_group_name(self,g): + if not isinstance(g,list): + g=self.groups[g] + for x in self.group_names: + if id(self.group_names[x])==id(g): + return x + return'' + def get_group_idx(self,tg): + se=id(tg) + for i in range(len(self.groups)): + for t in self.groups[i]: + if id(t)==se: + return i + return None + def add_group(self,name=None,move=True): + if name and name in self.group_names: + Logs.error('add_group: name %s already present'%name) + g=[] + self.group_names[name]=g + self.groups.append(g) + if move: + self.current_group=len(self.groups)-1 + def set_group(self,idx): + if isinstance(idx,str): + g=self.group_names[idx] + for i in range(len(self.groups)): + if id(g)==id(self.groups[i]): + self.current_group=i + else: + self.current_group=idx + def total(self): + total=0 + for group in self.groups: + for tg in group: + try: + total+=len(tg.tasks) + except AttributeError: + total+=1 + return total + def get_targets(self): + to_post=[] + min_grp=0 + for name in self.targets.split(','): + tg=self.get_tgen_by_name(name) + if not tg: + raise Errors.WafError('target %r does not exist'%name) + m=self.get_group_idx(tg) + if m>min_grp: + min_grp=m + to_post=[tg] + elif m==min_grp: + to_post.append(tg) + return(min_grp,to_post) + def get_all_task_gen(self): + lst=[] + for g in self.groups: + lst.extend(g) + return lst + def post_group(self): + if self.targets=='*': + for tg in self.groups[self.cur]: + try: + f=tg.post + except AttributeError: + pass + else: + f() + elif self.targets: + if self.cur<self._min_grp: + for tg in self.groups[self.cur]: + try: + f=tg.post + except AttributeError: + pass + else: + f() + else: + for tg in self._exact_tg: + tg.post() + else: + ln=self.launch_node() + if ln.is_child_of(self.bldnode): + Logs.warn('Building from the build directory, forcing --targets=*') + ln=self.srcnode + elif not ln.is_child_of(self.srcnode): + Logs.warn('CWD %s is not under %s, forcing --targets=* (run distclean?)'%(ln.abspath(),self.srcnode.abspath())) + ln=self.srcnode + for tg in self.groups[self.cur]: + try: + f=tg.post + except AttributeError: + pass + else: + if tg.path.is_child_of(ln): + f() + 
def get_tasks_group(self,idx): + tasks=[] + for tg in self.groups[idx]: + try: + tasks.extend(tg.tasks) + except AttributeError: + tasks.append(tg) + return tasks + def get_build_iterator(self): + self.cur=0 + if self.targets and self.targets!='*': + (self._min_grp,self._exact_tg)=self.get_targets() + global lazy_post + if self.post_mode!=POST_LAZY: + while self.cur<len(self.groups): + self.post_group() + self.cur+=1 + self.cur=0 + while self.cur<len(self.groups): + if self.post_mode!=POST_AT_ONCE: + self.post_group() + tasks=self.get_tasks_group(self.cur) + Task.set_file_constraints(tasks) + Task.set_precedence_constraints(tasks) + self.cur_tasks=tasks + self.cur+=1 + if not tasks: + continue + yield tasks + while 1: + yield[] +class inst(Task.Task): + color='CYAN' + def uid(self): + lst=[self.dest,self.path]+self.source + return Utils.h_list(repr(lst)) + def post(self): + buf=[] + for x in self.source: + if isinstance(x,waflib.Node.Node): + y=x + else: + y=self.path.find_resource(x) + if not y: + if Logs.verbose: + Logs.warn('Could not find %s immediately (may cause broken builds)'%x) + idx=self.generator.bld.get_group_idx(self) + for tg in self.generator.bld.groups[idx]: + if not isinstance(tg,inst)and id(tg)!=id(self): + tg.post() + y=self.path.find_resource(x) + if y: + break + else: + raise Errors.WafError('Could not find %r in %r'%(x,self.path)) + buf.append(y) + self.inputs=buf + def runnable_status(self): + ret=super(inst,self).runnable_status() + if ret==Task.SKIP_ME: + return Task.RUN_ME + return ret + def __str__(self): + return'' + def run(self): + return self.generator.exec_task() + def get_install_path(self,destdir=True): + dest=Utils.subst_vars(self.dest,self.env) + dest=dest.replace('/',os.sep) + if destdir and Options.options.destdir: + dest=os.path.join(Options.options.destdir,os.path.splitdrive(dest)[1].lstrip(os.sep)) + return dest + def exec_install_files(self): + destpath=self.get_install_path() + if not destpath: + raise Errors.WafError('unknown installation path %r'%self.generator) + for x,y in zip(self.source,self.inputs): + if self.relative_trick: + destfile=os.path.join(destpath,y.path_from(self.path)) + else: + destfile=os.path.join(destpath,y.name) + self.generator.bld.do_install(y.abspath(),destfile,self.chmod) + def exec_install_as(self): + destfile=self.get_install_path() + self.generator.bld.do_install(self.inputs[0].abspath(),destfile,self.chmod) + def exec_symlink_as(self): + destfile=self.get_install_path() + src=self.link + if self.relative_trick: + src=os.path.relpath(src,os.path.dirname(destfile)) + self.generator.bld.do_link(src,destfile) +class InstallContext(BuildContext): + '''installs the targets on the system''' + cmd='install' + def __init__(self,**kw): + super(InstallContext,self).__init__(**kw) + self.uninstall=[] + self.is_install=INSTALL + def do_install(self,src,tgt,chmod=Utils.O644): + d,_=os.path.split(tgt) + if not d: + raise Errors.WafError('Invalid installation given %r->%r'%(src,tgt)) + Utils.check_dir(d) + srclbl=src.replace(self.srcnode.abspath()+os.sep,'') + if not Options.options.force: + try: + st1=os.stat(tgt) + st2=os.stat(src) + except OSError: + pass + else: + if st1.st_mtime+2>=st2.st_mtime and st1.st_size==st2.st_size: + if not self.progress_bar: + Logs.info('- install %s (from %s)'%(tgt,srclbl)) + return False + if not self.progress_bar: + Logs.info('+ install %s (from %s)'%(tgt,srclbl)) + try: + os.remove(tgt) + except OSError: + pass + try: + shutil.copy2(src,tgt) + os.chmod(tgt,chmod) + except IOError: + try: + 
os.stat(src) + except(OSError,IOError): + Logs.error('File %r does not exist'%src) + raise Errors.WafError('Could not install the file %r'%tgt) + def do_link(self,src,tgt): + d,_=os.path.split(tgt) + Utils.check_dir(d) + link=False + if not os.path.islink(tgt): + link=True + elif os.readlink(tgt)!=src: + link=True + if link: + try:os.remove(tgt) + except OSError:pass + if not self.progress_bar: + Logs.info('+ symlink %s (to %s)'%(tgt,src)) + os.symlink(src,tgt) + else: + if not self.progress_bar: + Logs.info('- symlink %s (to %s)'%(tgt,src)) + def run_task_now(self,tsk,postpone): + tsk.post() + if not postpone: + if tsk.runnable_status()==Task.ASK_LATER: + raise self.WafError('cannot post the task %r'%tsk) + tsk.run() + def install_files(self,dest,files,env=None,chmod=Utils.O644,relative_trick=False,cwd=None,add=True,postpone=True): + tsk=inst(env=env or self.env) + tsk.bld=self + tsk.path=cwd or self.path + tsk.chmod=chmod + if isinstance(files,waflib.Node.Node): + tsk.source=[files] + else: + tsk.source=Utils.to_list(files) + tsk.dest=dest + tsk.exec_task=tsk.exec_install_files + tsk.relative_trick=relative_trick + if add:self.add_to_group(tsk) + self.run_task_now(tsk,postpone) + return tsk + def install_as(self,dest,srcfile,env=None,chmod=Utils.O644,cwd=None,add=True,postpone=True): + tsk=inst(env=env or self.env) + tsk.bld=self + tsk.path=cwd or self.path + tsk.chmod=chmod + tsk.source=[srcfile] + tsk.dest=dest + tsk.exec_task=tsk.exec_install_as + if add:self.add_to_group(tsk) + self.run_task_now(tsk,postpone) + return tsk + def symlink_as(self,dest,src,env=None,cwd=None,add=True,postpone=True,relative_trick=False): + if Utils.is_win32: + return + tsk=inst(env=env or self.env) + tsk.bld=self + tsk.dest=dest + tsk.path=cwd or self.path + tsk.source=[] + tsk.link=src + tsk.relative_trick=relative_trick + tsk.exec_task=tsk.exec_symlink_as + if add:self.add_to_group(tsk) + self.run_task_now(tsk,postpone) + return tsk +class UninstallContext(InstallContext): + '''removes the targets installed''' + cmd='uninstall' + def __init__(self,**kw): + super(UninstallContext,self).__init__(**kw) + self.is_install=UNINSTALL + def do_install(self,src,tgt,chmod=Utils.O644): + if not self.progress_bar: + Logs.info('- remove %s'%tgt) + self.uninstall.append(tgt) + try: + os.remove(tgt) + except OSError ,e: + if e.errno!=errno.ENOENT: + if not getattr(self,'uninstall_error',None): + self.uninstall_error=True + Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)') + if Logs.verbose>1: + Logs.warn('Could not remove %s (error code %r)'%(e.filename,e.errno)) + while tgt: + tgt=os.path.dirname(tgt) + try: + os.rmdir(tgt) + except OSError: + break + def do_link(self,src,tgt): + try: + if not self.progress_bar: + Logs.info('- remove %s'%tgt) + os.remove(tgt) + except OSError: + pass + while tgt: + tgt=os.path.dirname(tgt) + try: + os.rmdir(tgt) + except OSError: + break + def execute(self): + try: + def runnable_status(self): + return Task.SKIP_ME + setattr(Task.Task,'runnable_status_back',Task.Task.runnable_status) + setattr(Task.Task,'runnable_status',runnable_status) + super(UninstallContext,self).execute() + finally: + setattr(Task.Task,'runnable_status',Task.Task.runnable_status_back) +class CleanContext(BuildContext): + '''cleans the project''' + cmd='clean' + def execute(self): + self.restore() + if not self.all_envs: + self.load_envs() + self.recurse([self.run_dir]) + try: + self.clean() + finally: + self.store() + def clean(self): + Logs.debug('build: clean called') + if 
self.bldnode!=self.srcnode: + lst=[] + for e in self.all_envs.values(): + lst.extend(self.root.find_or_declare(f)for f in e[CFG_FILES]) + for n in self.bldnode.ant_glob('**/*',excl='.lock* *conf_check_*/** config.log c4che/*',quiet=True): + if n in lst: + continue + n.delete() + self.root.children={} + for v in'node_deps task_sigs raw_deps'.split(): + setattr(self,v,{}) +class ListContext(BuildContext): + '''lists the targets to execute''' + cmd='list' + def execute(self): + self.restore() + if not self.all_envs: + self.load_envs() + self.recurse([self.run_dir]) + self.pre_build() + self.timer=Utils.Timer() + for g in self.groups: + for tg in g: + try: + f=tg.post + except AttributeError: + pass + else: + f() + try: + self.get_tgen_by_name('') + except Exception: + pass + lst=list(self.task_gen_cache_names.keys()) + lst.sort() + for k in lst: + Logs.pprint('GREEN',k) +class StepContext(BuildContext): + '''executes tasks in a step-by-step fashion, for debugging''' + cmd='step' + def __init__(self,**kw): + super(StepContext,self).__init__(**kw) + self.files=Options.options.files + def compile(self): + if not self.files: + Logs.warn('Add a pattern for the debug build, for example "waf step --files=main.c,app"') + BuildContext.compile(self) + return + targets=None + if self.targets and self.targets!='*': + targets=self.targets.split(',') + for g in self.groups: + for tg in g: + if targets and tg.name not in targets: + continue + try: + f=tg.post + except AttributeError: + pass + else: + f() + for pat in self.files.split(','): + matcher=self.get_matcher(pat) + for tg in g: + if isinstance(tg,Task.TaskBase): + lst=[tg] + else: + lst=tg.tasks + for tsk in lst: + do_exec=False + for node in getattr(tsk,'inputs',[]): + if matcher(node,output=False): + do_exec=True + break + for node in getattr(tsk,'outputs',[]): + if matcher(node,output=True): + do_exec=True + break + if do_exec: + ret=tsk.run() + Logs.info('%s -> exit %r'%(str(tsk),ret)) + def get_matcher(self,pat): + inn=True + out=True + if pat.startswith('in:'): + out=False + pat=pat.replace('in:','') + elif pat.startswith('out:'): + inn=False + pat=pat.replace('out:','') + anode=self.root.find_node(pat) + pattern=None + if not anode: + if not pat.startswith('^'): + pat='^.+?%s'%pat + if not pat.endswith('$'): + pat='%s$'%pat + pattern=re.compile(pat) + def match(node,output): + if output==True and not out: + return False + if output==False and not inn: + return False + if anode: + return anode==node + else: + return pattern.match(node.abspath()) + return match +BuildContext.store=Utils.nogc(BuildContext.store) +BuildContext.restore=Utils.nogc(BuildContext.restore) diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Build.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Build.pyc Binary files differnew file mode 100644 index 0000000..62c3f92 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Build.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/ConfigSet.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/ConfigSet.py new file mode 100644 index 0000000..f9fdcc7 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/ConfigSet.py @@ -0,0 +1,152 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import copy,re,os +from waflib import Logs,Utils +re_imp=re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$',re.M) +class ConfigSet(object): + __slots__=('table','parent') + def __init__(self,filename=None): + self.table={} + if filename: + self.load(filename) + def __contains__(self,key): + if key in self.table:return True + try:return self.parent.__contains__(key) + except AttributeError:return False + def keys(self): + keys=set() + cur=self + while cur: + keys.update(cur.table.keys()) + cur=getattr(cur,'parent',None) + keys=list(keys) + keys.sort() + return keys + def __str__(self): + return"\n".join(["%r %r"%(x,self.__getitem__(x))for x in self.keys()]) + def __getitem__(self,key): + try: + while 1: + x=self.table.get(key,None) + if not x is None: + return x + self=self.parent + except AttributeError: + return[] + def __setitem__(self,key,value): + self.table[key]=value + def __delitem__(self,key): + self[key]=[] + def __getattr__(self,name): + if name in self.__slots__: + return object.__getattr__(self,name) + else: + return self[name] + def __setattr__(self,name,value): + if name in self.__slots__: + object.__setattr__(self,name,value) + else: + self[name]=value + def __delattr__(self,name): + if name in self.__slots__: + object.__delattr__(self,name) + else: + del self[name] + def derive(self): + newenv=ConfigSet() + newenv.parent=self + return newenv + def detach(self): + tbl=self.get_merged_dict() + try: + delattr(self,'parent') + except AttributeError: + pass + else: + keys=tbl.keys() + for x in keys: + tbl[x]=copy.deepcopy(tbl[x]) + self.table=tbl + def get_flat(self,key): + s=self[key] + if isinstance(s,str):return s + return' '.join(s) + def _get_list_value_for_modification(self,key): + try: + value=self.table[key] + except KeyError: + try:value=self.parent[key] + except AttributeError:value=[] + if isinstance(value,list): + value=value[:] + else: + value=[value] + else: + if not isinstance(value,list): + value=[value] + self.table[key]=value + return value + def append_value(self,var,val): + current_value=self._get_list_value_for_modification(var) + if isinstance(val,str): + val=[val] + current_value.extend(val) + def prepend_value(self,var,val): + if isinstance(val,str): + val=[val] + self.table[var]=val+self._get_list_value_for_modification(var) + def append_unique(self,var,val): + if isinstance(val,str): + val=[val] + current_value=self._get_list_value_for_modification(var) + for x in val: + if x not in current_value: + current_value.append(x) + def get_merged_dict(self): + table_list=[] + env=self + while 1: + table_list.insert(0,env.table) + try:env=env.parent + except AttributeError:break + merged_table={} + for table in table_list: + merged_table.update(table) + return merged_table + def store(self,filename): + try: + os.makedirs(os.path.split(filename)[0]) + except OSError: + pass + buf=[] + merged_table=self.get_merged_dict() + keys=list(merged_table.keys()) + keys.sort() + try: + fun=ascii + except NameError: + fun=repr + for k in keys: + if k!='undo_stack': + buf.append('%s = %s\n'%(k,fun(merged_table[k]))) + Utils.writef(filename,''.join(buf)) + def load(self,filename): + tbl=self.table + code=Utils.readf(filename,m='rU') + for m in re_imp.finditer(code): + g=m.group + tbl[g(2)]=eval(g(3)) + Logs.debug('env: %s'%str(self.table)) + def update(self,d): + for k,v in d.items(): + self[k]=v + def stash(self): + orig=self.table + tbl=self.table=self.table.copy() + for x in tbl.keys(): + 
tbl[x]=copy.deepcopy(tbl[x]) + self.undo_stack=self.undo_stack+[orig] + def revert(self): + self.table=self.undo_stack.pop(-1) diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/ConfigSet.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/ConfigSet.pyc Binary files differnew file mode 100644 index 0000000..87e1e6e --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/ConfigSet.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Configure.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Configure.py new file mode 100644 index 0000000..2814e61 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Configure.py @@ -0,0 +1,317 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,shlex,sys,time +from waflib import ConfigSet,Utils,Options,Logs,Context,Build,Errors +try: + from urllib import request +except ImportError: + from urllib import urlopen +else: + urlopen=request.urlopen +BREAK='break' +CONTINUE='continue' +WAF_CONFIG_LOG='config.log' +autoconfig=False +conf_template='''# project %(app)s configured on %(now)s by +# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s) +# using %(args)s +#''' +def download_check(node): + pass +def download_tool(tool,force=False,ctx=None): + for x in Utils.to_list(Context.remote_repo): + for sub in Utils.to_list(Context.remote_locs): + url='/'.join((x,sub,tool+'.py')) + try: + web=urlopen(url) + try: + if web.getcode()!=200: + continue + except AttributeError: + pass + except Exception: + continue + else: + tmp=ctx.root.make_node(os.sep.join((Context.waf_dir,'waflib','extras',tool+'.py'))) + tmp.write(web.read(),'wb') + Logs.warn('Downloaded %s from %s'%(tool,url)) + download_check(tmp) + try: + module=Context.load_tool(tool) + except Exception: + Logs.warn('The tool %s from %s is unusable'%(tool,url)) + try: + tmp.delete() + except Exception: + pass + continue + return module + raise Errors.WafError('Could not load the Waf tool') +class ConfigurationContext(Context.Context): + '''configures the project''' + cmd='configure' + error_handlers=[] + def __init__(self,**kw): + super(ConfigurationContext,self).__init__(**kw) + self.environ=dict(os.environ) + self.all_envs={} + self.top_dir=None + self.out_dir=None + self.tools=[] + self.hash=0 + self.files=[] + self.tool_cache=[] + self.setenv('') + def setenv(self,name,env=None): + if name not in self.all_envs or env: + if not env: + env=ConfigSet.ConfigSet() + self.prepare_env(env) + else: + env=env.derive() + self.all_envs[name]=env + self.variant=name + def get_env(self): + return self.all_envs[self.variant] + def set_env(self,val): + self.all_envs[self.variant]=val + env=property(get_env,set_env) + def init_dirs(self): + top=self.top_dir + if not top: + top=Options.options.top + if not top: + top=getattr(Context.g_module,Context.TOP,None) + if not top: + top=self.path.abspath() + top=os.path.abspath(top) + self.srcnode=(os.path.isabs(top)and self.root or self.path).find_dir(top) + assert(self.srcnode) + out=self.out_dir + if not out: + out=Options.options.out + if not out: + out=getattr(Context.g_module,Context.OUT,None) + if not out: + out=Options.lockfile.replace('.lock-waf_%s_'%sys.platform,'').replace('.lock-waf','') + self.bldnode=(os.path.isabs(out)and self.root or self.path).make_node(out) + self.bldnode.mkdir() + if not os.path.isdir(self.bldnode.abspath()): + conf.fatal('Could not create the build 
directory %s'%self.bldnode.abspath()) + def execute(self): + self.init_dirs() + self.cachedir=self.bldnode.make_node(Build.CACHE_DIR) + self.cachedir.mkdir() + path=os.path.join(self.bldnode.abspath(),WAF_CONFIG_LOG) + self.logger=Logs.make_logger(path,'cfg') + app=getattr(Context.g_module,'APPNAME','') + if app: + ver=getattr(Context.g_module,'VERSION','') + if ver: + app="%s (%s)"%(app,ver) + now=time.ctime() + pyver=sys.hexversion + systype=sys.platform + args=" ".join(sys.argv) + wafver=Context.WAFVERSION + abi=Context.ABI + self.to_log(conf_template%vars()) + self.msg('Setting top to',self.srcnode.abspath()) + self.msg('Setting out to',self.bldnode.abspath()) + if id(self.srcnode)==id(self.bldnode): + Logs.warn('Setting top == out (remember to use "update_outputs")') + elif id(self.path)!=id(self.srcnode): + if self.srcnode.is_child_of(self.path): + Logs.warn('Are you certain that you do not want to set top="." ?') + super(ConfigurationContext,self).execute() + self.store() + Context.top_dir=self.srcnode.abspath() + Context.out_dir=self.bldnode.abspath() + env=ConfigSet.ConfigSet() + env['argv']=sys.argv + env['options']=Options.options.__dict__ + env.run_dir=Context.run_dir + env.top_dir=Context.top_dir + env.out_dir=Context.out_dir + env['hash']=self.hash + env['files']=self.files + env['environ']=dict(self.environ) + if not self.env.NO_LOCK_IN_RUN: + env.store(Context.run_dir+os.sep+Options.lockfile) + if not self.env.NO_LOCK_IN_TOP: + env.store(Context.top_dir+os.sep+Options.lockfile) + if not self.env.NO_LOCK_IN_OUT: + env.store(Context.out_dir+os.sep+Options.lockfile) + def prepare_env(self,env): + if not env.PREFIX: + if Options.options.prefix or Utils.is_win32: + env.PREFIX=os.path.abspath(os.path.expanduser(Options.options.prefix)) + else: + env.PREFIX='' + if not env.BINDIR: + env.BINDIR=Utils.subst_vars('${PREFIX}/bin',env) + if not env.LIBDIR: + env.LIBDIR=Utils.subst_vars('${PREFIX}/lib',env) + def store(self): + n=self.cachedir.make_node('build.config.py') + n.write('version = 0x%x\ntools = %r\n'%(Context.HEXVERSION,self.tools)) + if not self.all_envs: + self.fatal('nothing to store in the configuration context!') + for key in self.all_envs: + tmpenv=self.all_envs[key] + tmpenv.store(os.path.join(self.cachedir.abspath(),key+Build.CACHE_SUFFIX)) + def load(self,input,tooldir=None,funs=None,download=True): + tools=Utils.to_list(input) + if tooldir:tooldir=Utils.to_list(tooldir) + for tool in tools: + mag=(tool,id(self.env),funs) + if mag in self.tool_cache: + self.to_log('(tool %s is already loaded, skipping)'%tool) + continue + self.tool_cache.append(mag) + module=None + try: + module=Context.load_tool(tool,tooldir) + except ImportError ,e: + if Options.options.download: + module=download_tool(tool,ctx=self) + if not module: + self.fatal('Could not load the Waf tool %r or download a suitable replacement from the repository (sys.path %r)\n%s'%(tool,sys.path,e)) + else: + self.fatal('Could not load the Waf tool %r from %r (try the --download option?):\n%s'%(tool,sys.path,e)) + except Exception ,e: + self.to_log('imp %r (%r & %r)'%(tool,tooldir,funs)) + self.to_log(Utils.ex_stack()) + raise + if funs is not None: + self.eval_rules(funs) + else: + func=getattr(module,'configure',None) + if func: + if type(func)is type(Utils.readf):func(self) + else:self.eval_rules(func) + self.tools.append({'tool':tool,'tooldir':tooldir,'funs':funs}) + def post_recurse(self,node): + super(ConfigurationContext,self).post_recurse(node) + self.hash=Utils.h_list((self.hash,node.read('rb'))) + 
self.files.append(node.abspath()) + def eval_rules(self,rules): + self.rules=Utils.to_list(rules) + for x in self.rules: + f=getattr(self,x) + if not f:self.fatal("No such method '%s'."%x) + try: + f() + except Exception ,e: + ret=self.err_handler(x,e) + if ret==BREAK: + break + elif ret==CONTINUE: + continue + else: + raise + def err_handler(self,fun,error): + pass +def conf(f): + def fun(*k,**kw): + mandatory=True + if'mandatory'in kw: + mandatory=kw['mandatory'] + del kw['mandatory'] + try: + return f(*k,**kw) + except Errors.ConfigurationError: + if mandatory: + raise + setattr(ConfigurationContext,f.__name__,fun) + setattr(Build.BuildContext,f.__name__,fun) + return f +@conf +def add_os_flags(self,var,dest=None): + try:self.env.append_value(dest or var,shlex.split(self.environ[var])) + except KeyError:pass +@conf +def cmd_to_list(self,cmd): + if isinstance(cmd,str)and cmd.find(' '): + try: + os.stat(cmd) + except OSError: + return shlex.split(cmd) + else: + return[cmd] + return cmd +@conf +def check_waf_version(self,mini='1.6.99',maxi='1.8.0'): + self.start_msg('Checking for waf version in %s-%s'%(str(mini),str(maxi))) + ver=Context.HEXVERSION + if Utils.num2ver(mini)>ver: + self.fatal('waf version should be at least %r (%r found)'%(Utils.num2ver(mini),ver)) + if Utils.num2ver(maxi)<ver: + self.fatal('waf version should be at most %r (%r found)'%(Utils.num2ver(maxi),ver)) + self.end_msg('ok') +@conf +def find_file(self,filename,path_list=[]): + for n in Utils.to_list(filename): + for d in Utils.to_list(path_list): + p=os.path.join(d,n) + if os.path.exists(p): + return p + self.fatal('Could not find %r'%filename) +@conf +def find_program(self,filename,**kw): + exts=kw.get('exts',Utils.is_win32 and'.exe,.com,.bat,.cmd'or',.sh,.pl,.py') + environ=kw.get('environ',os.environ) + ret='' + filename=Utils.to_list(filename) + var=kw.get('var','') + if not var: + var=filename[0].upper() + if self.env[var]: + ret=self.env[var] + elif var in environ: + ret=environ[var] + path_list=kw.get('path_list','') + if not ret: + if path_list: + path_list=Utils.to_list(path_list) + else: + path_list=environ.get('PATH','').split(os.pathsep) + if not isinstance(filename,list): + filename=[filename] + for a in exts.split(','): + if ret: + break + for b in filename: + if ret: + break + for c in path_list: + if ret: + break + x=os.path.expanduser(os.path.join(c,b+a)) + if os.path.isfile(x): + ret=x + if not ret and Utils.winreg: + ret=Utils.get_registry_app_path(Utils.winreg.HKEY_CURRENT_USER,filename) + if not ret and Utils.winreg: + ret=Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE,filename) + self.msg('Checking for program '+','.join(filename),ret or False) + self.to_log('find program=%r paths=%r var=%r -> %r'%(filename,path_list,var,ret)) + if not ret: + self.fatal(kw.get('errmsg','')or'Could not find the program %s'%','.join(filename)) + if var: + self.env[var]=ret + return ret +@conf +def find_perl_program(self,filename,path_list=[],var=None,environ=None,exts=''): + try: + app=self.find_program(filename,path_list=path_list,var=var,environ=environ,exts=exts) + except Exception: + self.find_program('perl',var='PERL') + app=self.find_file(filename,os.environ['PATH'].split(os.pathsep)) + if not app: + raise + if var: + self.env[var]=Utils.to_list(self.env['PERL'])+[app] + self.msg('Checking for %r'%filename,app) diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Configure.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Configure.pyc Binary files differnew file mode 100644 
index 0000000..774c052 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Configure.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Context.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Context.py new file mode 100644 index 0000000..5db55dc --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Context.py @@ -0,0 +1,316 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,imp,sys +from waflib import Utils,Errors,Logs +import waflib.Node +HEXVERSION=0x1071000 +WAFVERSION="1.7.16" +WAFREVISION="73c1705078f8c9c51a33e20f221a309d5a94b5e1" +ABI=98 +DBFILE='.wafpickle-%s-%d-%d'%(sys.platform,sys.hexversion,ABI) +APPNAME='APPNAME' +VERSION='VERSION' +TOP='top' +OUT='out' +WSCRIPT_FILE='wscript' +launch_dir='' +run_dir='' +top_dir='' +out_dir='' +waf_dir='' +local_repo='' +remote_repo='http://waf.googlecode.com/git/' +remote_locs=['waflib/extras','waflib/Tools'] +g_module=None +STDOUT=1 +STDERR=-1 +BOTH=0 +classes=[] +def create_context(cmd_name,*k,**kw): + global classes + for x in classes: + if x.cmd==cmd_name: + return x(*k,**kw) + ctx=Context(*k,**kw) + ctx.fun=cmd_name + return ctx +class store_context(type): + def __init__(cls,name,bases,dict): + super(store_context,cls).__init__(name,bases,dict) + name=cls.__name__ + if name=='ctx'or name=='Context': + return + try: + cls.cmd + except AttributeError: + raise Errors.WafError('Missing command for the context class %r (cmd)'%name) + if not getattr(cls,'fun',None): + cls.fun=cls.cmd + global classes + classes.insert(0,cls) +ctx=store_context('ctx',(object,),{}) +class Context(ctx): + errors=Errors + tools={} + def __init__(self,**kw): + try: + rd=kw['run_dir'] + except KeyError: + global run_dir + rd=run_dir + self.node_class=type("Nod3",(waflib.Node.Node,),{}) + self.node_class.__module__="waflib.Node" + self.node_class.ctx=self + self.root=self.node_class('',None) + self.cur_script=None + self.path=self.root.find_dir(rd) + self.stack_path=[] + self.exec_dict={'ctx':self,'conf':self,'bld':self,'opt':self} + self.logger=None + def __hash__(self): + return id(self) + def load(self,tool_list,*k,**kw): + tools=Utils.to_list(tool_list) + path=Utils.to_list(kw.get('tooldir','')) + for t in tools: + module=load_tool(t,path) + fun=getattr(module,kw.get('name',self.fun),None) + if fun: + fun(self) + def execute(self): + global g_module + self.recurse([os.path.dirname(g_module.root_path)]) + def pre_recurse(self,node): + self.stack_path.append(self.cur_script) + self.cur_script=node + self.path=node.parent + def post_recurse(self,node): + self.cur_script=self.stack_path.pop() + if self.cur_script: + self.path=self.cur_script.parent + def recurse(self,dirs,name=None,mandatory=True,once=True): + try: + cache=self.recurse_cache + except AttributeError: + cache=self.recurse_cache={} + for d in Utils.to_list(dirs): + if not os.path.isabs(d): + d=os.path.join(self.path.abspath(),d) + WSCRIPT=os.path.join(d,WSCRIPT_FILE) + WSCRIPT_FUN=WSCRIPT+'_'+(name or self.fun) + node=self.root.find_node(WSCRIPT_FUN) + if node and(not once or node not in cache): + cache[node]=True + self.pre_recurse(node) + try: + function_code=node.read('rU') + exec(compile(function_code,node.abspath(),'exec'),self.exec_dict) + finally: + self.post_recurse(node) + elif not node: + node=self.root.find_node(WSCRIPT) + tup=(node,name or self.fun) + if node and(not once or tup not in cache): + cache[tup]=True + 
self.pre_recurse(node) + try: + wscript_module=load_module(node.abspath()) + user_function=getattr(wscript_module,(name or self.fun),None) + if not user_function: + if not mandatory: + continue + raise Errors.WafError('No function %s defined in %s'%(name or self.fun,node.abspath())) + user_function(self) + finally: + self.post_recurse(node) + elif not node: + if not mandatory: + continue + raise Errors.WafError('No wscript file in directory %s'%d) + def exec_command(self,cmd,**kw): + subprocess=Utils.subprocess + kw['shell']=isinstance(cmd,str) + Logs.debug('runner: %r'%cmd) + Logs.debug('runner_env: kw=%s'%kw) + if self.logger: + self.logger.info(cmd) + if'stdout'not in kw: + kw['stdout']=subprocess.PIPE + if'stderr'not in kw: + kw['stderr']=subprocess.PIPE + try: + if kw['stdout']or kw['stderr']: + p=subprocess.Popen(cmd,**kw) + (out,err)=p.communicate() + ret=p.returncode + else: + out,err=(None,None) + ret=subprocess.Popen(cmd,**kw).wait() + except Exception ,e: + raise Errors.WafError('Execution failure: %s'%str(e),ex=e) + if out: + if not isinstance(out,str): + out=out.decode(sys.stdout.encoding or'iso8859-1') + if self.logger: + self.logger.debug('out: %s'%out) + else: + sys.stdout.write(out) + if err: + if not isinstance(err,str): + err=err.decode(sys.stdout.encoding or'iso8859-1') + if self.logger: + self.logger.error('err: %s'%err) + else: + sys.stderr.write(err) + return ret + def cmd_and_log(self,cmd,**kw): + subprocess=Utils.subprocess + kw['shell']=isinstance(cmd,str) + Logs.debug('runner: %r'%cmd) + if'quiet'in kw: + quiet=kw['quiet'] + del kw['quiet'] + else: + quiet=None + if'output'in kw: + to_ret=kw['output'] + del kw['output'] + else: + to_ret=STDOUT + kw['stdout']=kw['stderr']=subprocess.PIPE + if quiet is None: + self.to_log(cmd) + try: + p=subprocess.Popen(cmd,**kw) + (out,err)=p.communicate() + except Exception ,e: + raise Errors.WafError('Execution failure: %s'%str(e),ex=e) + if not isinstance(out,str): + out=out.decode(sys.stdout.encoding or'iso8859-1') + if not isinstance(err,str): + err=err.decode(sys.stdout.encoding or'iso8859-1') + if out and quiet!=STDOUT and quiet!=BOTH: + self.to_log('out: %s'%out) + if err and quiet!=STDERR and quiet!=BOTH: + self.to_log('err: %s'%err) + if p.returncode: + e=Errors.WafError('Command %r returned %r'%(cmd,p.returncode)) + e.returncode=p.returncode + e.stderr=err + e.stdout=out + raise e + if to_ret==BOTH: + return(out,err) + elif to_ret==STDERR: + return err + return out + def fatal(self,msg,ex=None): + if self.logger: + self.logger.info('from %s: %s'%(self.path.abspath(),msg)) + try: + msg='%s\n(complete log in %s)'%(msg,self.logger.handlers[0].baseFilename) + except Exception: + pass + raise self.errors.ConfigurationError(msg,ex=ex) + def to_log(self,msg): + if not msg: + return + if self.logger: + self.logger.info(msg) + else: + sys.stderr.write(str(msg)) + sys.stderr.flush() + def msg(self,msg,result,color=None): + self.start_msg(msg) + if not isinstance(color,str): + color=result and'GREEN'or'YELLOW' + self.end_msg(result,color) + def start_msg(self,msg): + try: + if self.in_msg: + self.in_msg+=1 + return + except AttributeError: + self.in_msg=0 + self.in_msg+=1 + try: + self.line_just=max(self.line_just,len(msg)) + except AttributeError: + self.line_just=max(40,len(msg)) + for x in(self.line_just*'-',msg): + self.to_log(x) + Logs.pprint('NORMAL',"%s :"%msg.ljust(self.line_just),sep='') + def end_msg(self,result,color=None): + self.in_msg-=1 + if self.in_msg: + return + defcolor='GREEN' + if result==True: + msg='ok' + 
elif result==False: + msg='not found' + defcolor='YELLOW' + else: + msg=str(result) + self.to_log(msg) + Logs.pprint(color or defcolor,msg) + def load_special_tools(self,var,ban=[]): + global waf_dir + lst=self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var) + for x in lst: + if not x.name in ban: + load_tool(x.name.replace('.py','')) +cache_modules={} +def load_module(path): + try: + return cache_modules[path] + except KeyError: + pass + module=imp.new_module(WSCRIPT_FILE) + try: + code=Utils.readf(path,m='rU') + except(IOError,OSError): + raise Errors.WafError('Could not read the file %r'%path) + module_dir=os.path.dirname(path) + sys.path.insert(0,module_dir) + exec(compile(code,path,'exec'),module.__dict__) + sys.path.remove(module_dir) + cache_modules[path]=module + return module +def load_tool(tool,tooldir=None): + if tool=='java': + tool='javaw' + elif tool=='compiler_cc': + tool='compiler_c' + else: + tool=tool.replace('++','xx') + if tooldir: + assert isinstance(tooldir,list) + sys.path=tooldir+sys.path + try: + __import__(tool) + ret=sys.modules[tool] + Context.tools[tool]=ret + return ret + finally: + for d in tooldir: + sys.path.remove(d) + else: + global waf_dir + try: + os.stat(os.path.join(waf_dir,'waflib','extras',tool+'.py')) + except OSError: + try: + os.stat(os.path.join(waf_dir,'waflib','Tools',tool+'.py')) + except OSError: + d=tool + else: + d='waflib.Tools.%s'%tool + else: + d='waflib.extras.%s'%tool + __import__(d) + ret=sys.modules[d] + Context.tools[tool]=ret + return ret diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Context.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Context.pyc Binary files differnew file mode 100644 index 0000000..859fc7f --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Context.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Errors.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Errors.py new file mode 100644 index 0000000..aacc1a9 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Errors.py @@ -0,0 +1,37 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import traceback,sys +class WafError(Exception): + def __init__(self,msg='',ex=None): + self.msg=msg + assert not isinstance(msg,Exception) + self.stack=[] + if ex: + if not msg: + self.msg=str(ex) + if isinstance(ex,WafError): + self.stack=ex.stack + else: + self.stack=traceback.extract_tb(sys.exc_info()[2]) + self.stack+=traceback.extract_stack()[:-1] + self.verbose_msg=''.join(traceback.format_list(self.stack)) + def __str__(self): + return str(self.msg) +class BuildError(WafError): + def __init__(self,error_tasks=[]): + self.tasks=error_tasks + WafError.__init__(self,self.format_error()) + def format_error(self): + lst=['Build failed'] + for tsk in self.tasks: + txt=tsk.format_error() + if txt:lst.append(txt) + return'\n'.join(lst) +class ConfigurationError(WafError): + pass +class TaskRescan(WafError): + pass +class TaskNotReady(WafError): + pass diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Errors.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Errors.pyc Binary files differnew file mode 100644 index 0000000..17ce351 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Errors.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Logs.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Logs.py new file mode 100644 index 0000000..ef62452 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Logs.py @@ -0,0 +1,177 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,re,traceback,sys +_nocolor=os.environ.get('NOCOLOR','no')not in('no','0','false') +try: + if not _nocolor: + import waflib.ansiterm +except ImportError: + pass +try: + import threading +except ImportError: + if not'JOBS'in os.environ: + os.environ['JOBS']='1' +else: + wlock=threading.Lock() + class sync_stream(object): + def __init__(self,stream): + self.stream=stream + self.encoding=self.stream.encoding + def write(self,txt): + try: + wlock.acquire() + self.stream.write(txt) + self.stream.flush() + finally: + wlock.release() + def fileno(self): + return self.stream.fileno() + def flush(self): + self.stream.flush() + def isatty(self): + return self.stream.isatty() + if not os.environ.get('NOSYNC',False): + if id(sys.stdout)==id(sys.__stdout__): + sys.stdout=sync_stream(sys.stdout) + sys.stderr=sync_stream(sys.stderr) +import logging +LOG_FORMAT="%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s" +HOUR_FORMAT="%H:%M:%S" +zones='' +verbose=0 +colors_lst={'USE':True,'BOLD':'\x1b[01;1m','RED':'\x1b[01;31m','GREEN':'\x1b[32m','YELLOW':'\x1b[33m','PINK':'\x1b[35m','BLUE':'\x1b[01;34m','CYAN':'\x1b[36m','NORMAL':'\x1b[0m','cursor_on':'\x1b[?25h','cursor_off':'\x1b[?25l',} +got_tty=not os.environ.get('TERM','dumb')in['dumb','emacs'] +if got_tty: + try: + got_tty=sys.stderr.isatty()and sys.stdout.isatty() + except AttributeError: + got_tty=False +if(not got_tty and os.environ.get('TERM','dumb')!='msys')or _nocolor: + colors_lst['USE']=False +def get_term_cols(): + return 80 +try: + import struct,fcntl,termios +except ImportError: + pass +else: + if got_tty: + def get_term_cols_real(): + dummy_lines,cols=struct.unpack("HHHH",fcntl.ioctl(sys.stderr.fileno(),termios.TIOCGWINSZ,struct.pack("HHHH",0,0,0,0)))[:2] + return cols + try: + get_term_cols_real() + except Exception: + pass + else: + get_term_cols=get_term_cols_real +get_term_cols.__doc__=""" + Get the console 
width in characters. + + :return: the number of characters per line + :rtype: int + """ +def get_color(cl): + if not colors_lst['USE']:return'' + return colors_lst.get(cl,'') +class color_dict(object): + def __getattr__(self,a): + return get_color(a) + def __call__(self,a): + return get_color(a) +colors=color_dict() +re_log=re.compile(r'(\w+): (.*)',re.M) +class log_filter(logging.Filter): + def __init__(self,name=None): + pass + def filter(self,rec): + rec.c1=colors.PINK + rec.c2=colors.NORMAL + rec.zone=rec.module + if rec.levelno>=logging.INFO: + if rec.levelno>=logging.ERROR: + rec.c1=colors.RED + elif rec.levelno>=logging.WARNING: + rec.c1=colors.YELLOW + else: + rec.c1=colors.GREEN + return True + m=re_log.match(rec.msg) + if m: + rec.zone=m.group(1) + rec.msg=m.group(2) + if zones: + return getattr(rec,'zone','')in zones or'*'in zones + elif not verbose>2: + return False + return True +class formatter(logging.Formatter): + def __init__(self): + logging.Formatter.__init__(self,LOG_FORMAT,HOUR_FORMAT) + def format(self,rec): + if rec.levelno>=logging.WARNING or rec.levelno==logging.INFO: + try: + msg=rec.msg.decode('utf-8') + except Exception: + msg=rec.msg + return'%s%s%s'%(rec.c1,msg,rec.c2) + return logging.Formatter.format(self,rec) +log=None +def debug(*k,**kw): + if verbose: + k=list(k) + k[0]=k[0].replace('\n',' ') + global log + log.debug(*k,**kw) +def error(*k,**kw): + global log + log.error(*k,**kw) + if verbose>2: + st=traceback.extract_stack() + if st: + st=st[:-1] + buf=[] + for filename,lineno,name,line in st: + buf.append(' File "%s", line %d, in %s'%(filename,lineno,name)) + if line: + buf.append(' %s'%line.strip()) + if buf:log.error("\n".join(buf)) +def warn(*k,**kw): + global log + log.warn(*k,**kw) +def info(*k,**kw): + global log + log.info(*k,**kw) +def init_log(): + global log + log=logging.getLogger('waflib') + log.handlers=[] + log.filters=[] + hdlr=logging.StreamHandler() + hdlr.setFormatter(formatter()) + log.addHandler(hdlr) + log.addFilter(log_filter()) + log.setLevel(logging.DEBUG) +def make_logger(path,name): + logger=logging.getLogger(name) + hdlr=logging.FileHandler(path,'w') + formatter=logging.Formatter('%(message)s') + hdlr.setFormatter(formatter) + logger.addHandler(hdlr) + logger.setLevel(logging.DEBUG) + return logger +def make_mem_logger(name,to_log,size=10000): + from logging.handlers import MemoryHandler + logger=logging.getLogger(name) + hdlr=MemoryHandler(size,target=to_log) + formatter=logging.Formatter('%(message)s') + hdlr.setFormatter(formatter) + logger.addHandler(hdlr) + logger.memhandler=hdlr + logger.setLevel(logging.DEBUG) + return logger +def pprint(col,str,label='',sep='\n'): + sys.stderr.write("%s%s%s %s%s"%(colors(col),str,colors.NORMAL,label,sep)) diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Logs.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Logs.pyc Binary files differnew file mode 100644 index 0000000..6fd8925 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Logs.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Node.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Node.py new file mode 100644 index 0000000..7729613 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Node.py @@ -0,0 +1,467 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,re,sys,shutil +from waflib import Utils,Errors +exclude_regs=''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/BitKeeper +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzrignore +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/.arch-ids +**/{arch} +**/_darcs +**/_darcs/** +**/.intlcache +**/.DS_Store''' +def split_path(path): + return path.split('/') +def split_path_cygwin(path): + if path.startswith('//'): + ret=path.split('/')[2:] + ret[0]='/'+ret[0] + return ret + return path.split('/') +re_sp=re.compile('[/\\\\]') +def split_path_win32(path): + if path.startswith('\\\\'): + ret=re.split(re_sp,path)[2:] + ret[0]='\\'+ret[0] + return ret + return re.split(re_sp,path) +if sys.platform=='cygwin': + split_path=split_path_cygwin +elif Utils.is_win32: + split_path=split_path_win32 +class Node(object): + __slots__=('name','sig','children','parent','cache_abspath','cache_isdir','cache_sig') + def __init__(self,name,parent): + self.name=name + self.parent=parent + if parent: + if name in parent.children: + raise Errors.WafError('node %s exists in the parent files %r already'%(name,parent)) + parent.children[name]=self + def __setstate__(self,data): + self.name=data[0] + self.parent=data[1] + if data[2]is not None: + self.children=data[2] + if data[3]is not None: + self.sig=data[3] + def __getstate__(self): + return(self.name,self.parent,getattr(self,'children',None),getattr(self,'sig',None)) + def __str__(self): + return self.name + def __repr__(self): + return self.abspath() + def __hash__(self): + return id(self) + def __eq__(self,node): + return id(self)==id(node) + def __copy__(self): + raise Errors.WafError('nodes are not supposed to be copied') + def read(self,flags='r',encoding='ISO8859-1'): + return Utils.readf(self.abspath(),flags,encoding) + def write(self,data,flags='w',encoding='ISO8859-1'): + Utils.writef(self.abspath(),data,flags,encoding) + def chmod(self,val): + os.chmod(self.abspath(),val) + def delete(self): + try: + if hasattr(self,'children'): + shutil.rmtree(self.abspath()) + else: + os.remove(self.abspath()) + except OSError: + pass + self.evict() + def evict(self): + del self.parent.children[self.name] + def suffix(self): + k=max(0,self.name.rfind('.')) + return self.name[k:] + def height(self): + d=self + val=-1 + while d: + d=d.parent + val+=1 + return val + def listdir(self): + lst=Utils.listdir(self.abspath()) + lst.sort() + return lst + def mkdir(self): + if getattr(self,'cache_isdir',None): + return + try: + self.parent.mkdir() + except OSError: + pass + if self.name: + try: + os.makedirs(self.abspath()) + except OSError: + pass + if not os.path.isdir(self.abspath()): + raise Errors.WafError('Could not create the directory %s'%self.abspath()) + try: + self.children + except AttributeError: + self.children={} + self.cache_isdir=True + def find_node(self,lst): + if isinstance(lst,str): + lst=[x for x in split_path(lst)if x and x!='.'] + cur=self + for x in lst: + if x=='..': + cur=cur.parent or cur + continue + try: + ch=cur.children + except AttributeError: + cur.children={} + else: + try: + cur=cur.children[x] + continue + except KeyError: + pass + cur=self.__class__(x,cur) + try: + os.stat(cur.abspath()) + except OSError: + cur.evict() + return None + ret=cur + try: + os.stat(ret.abspath()) + except OSError: + ret.evict() + return None + try: + while not 
getattr(cur.parent,'cache_isdir',None): + cur=cur.parent + cur.cache_isdir=True + except AttributeError: + pass + return ret + def make_node(self,lst): + if isinstance(lst,str): + lst=[x for x in split_path(lst)if x and x!='.'] + cur=self + for x in lst: + if x=='..': + cur=cur.parent or cur + continue + if getattr(cur,'children',{}): + if x in cur.children: + cur=cur.children[x] + continue + else: + cur.children={} + cur=self.__class__(x,cur) + return cur + def search_node(self,lst): + if isinstance(lst,str): + lst=[x for x in split_path(lst)if x and x!='.'] + cur=self + for x in lst: + if x=='..': + cur=cur.parent or cur + else: + try: + cur=cur.children[x] + except(AttributeError,KeyError): + return None + return cur + def path_from(self,node): + c1=self + c2=node + c1h=c1.height() + c2h=c2.height() + lst=[] + up=0 + while c1h>c2h: + lst.append(c1.name) + c1=c1.parent + c1h-=1 + while c2h>c1h: + up+=1 + c2=c2.parent + c2h-=1 + while id(c1)!=id(c2): + lst.append(c1.name) + up+=1 + c1=c1.parent + c2=c2.parent + for i in range(up): + lst.append('..') + lst.reverse() + return os.sep.join(lst)or'.' + def abspath(self): + try: + return self.cache_abspath + except AttributeError: + pass + if os.sep=='/': + if not self.parent: + val=os.sep + elif not self.parent.name: + val=os.sep+self.name + else: + val=self.parent.abspath()+os.sep+self.name + else: + if not self.parent: + val='' + elif not self.parent.name: + val=self.name+os.sep + else: + val=self.parent.abspath().rstrip(os.sep)+os.sep+self.name + self.cache_abspath=val + return val + def is_child_of(self,node): + p=self + diff=self.height()-node.height() + while diff>0: + diff-=1 + p=p.parent + return id(p)==id(node) + def ant_iter(self,accept=None,maxdepth=25,pats=[],dir=False,src=True,remove=True): + dircont=self.listdir() + dircont.sort() + try: + lst=set(self.children.keys()) + except AttributeError: + self.children={} + else: + if remove: + for x in lst-set(dircont): + self.children[x].evict() + for name in dircont: + npats=accept(name,pats) + if npats and npats[0]: + accepted=[]in npats[0] + node=self.make_node([name]) + isdir=os.path.isdir(node.abspath()) + if accepted: + if isdir: + if dir: + yield node + else: + if src: + yield node + if getattr(node,'cache_isdir',None)or isdir: + node.cache_isdir=True + if maxdepth: + for k in node.ant_iter(accept=accept,maxdepth=maxdepth-1,pats=npats,dir=dir,src=src,remove=remove): + yield k + raise StopIteration + def ant_glob(self,*k,**kw): + src=kw.get('src',True) + dir=kw.get('dir',False) + excl=kw.get('excl',exclude_regs) + incl=k and k[0]or kw.get('incl','**') + reflags=kw.get('ignorecase',0)and re.I + def to_pat(s): + lst=Utils.to_list(s) + ret=[] + for x in lst: + x=x.replace('\\','/').replace('//','/') + if x.endswith('/'): + x+='**' + lst2=x.split('/') + accu=[] + for k in lst2: + if k=='**': + accu.append(k) + else: + k=k.replace('.','[.]').replace('*','.*').replace('?','.').replace('+','\\+') + k='^%s$'%k + try: + accu.append(re.compile(k,flags=reflags)) + except Exception ,e: + raise Errors.WafError("Invalid pattern: %s"%k,e) + ret.append(accu) + return ret + def filtre(name,nn): + ret=[] + for lst in nn: + if not lst: + pass + elif lst[0]=='**': + ret.append(lst) + if len(lst)>1: + if lst[1].match(name): + ret.append(lst[2:]) + else: + ret.append([]) + elif lst[0].match(name): + ret.append(lst[1:]) + return ret + def accept(name,pats): + nacc=filtre(name,pats[0]) + nrej=filtre(name,pats[1]) + if[]in nrej: + nacc=[] + return[nacc,nrej] + ret=[x for x in 
self.ant_iter(accept=accept,pats=[to_pat(incl),to_pat(excl)],maxdepth=kw.get('maxdepth',25),dir=dir,src=src,remove=kw.get('remove',True))] + if kw.get('flat',False): + return' '.join([x.path_from(self)for x in ret]) + return ret + def is_src(self): + cur=self + x=id(self.ctx.srcnode) + y=id(self.ctx.bldnode) + while cur.parent: + if id(cur)==y: + return False + if id(cur)==x: + return True + cur=cur.parent + return False + def is_bld(self): + cur=self + y=id(self.ctx.bldnode) + while cur.parent: + if id(cur)==y: + return True + cur=cur.parent + return False + def get_src(self): + cur=self + x=id(self.ctx.srcnode) + y=id(self.ctx.bldnode) + lst=[] + while cur.parent: + if id(cur)==y: + lst.reverse() + return self.ctx.srcnode.make_node(lst) + if id(cur)==x: + return self + lst.append(cur.name) + cur=cur.parent + return self + def get_bld(self): + cur=self + x=id(self.ctx.srcnode) + y=id(self.ctx.bldnode) + lst=[] + while cur.parent: + if id(cur)==y: + return self + if id(cur)==x: + lst.reverse() + return self.ctx.bldnode.make_node(lst) + lst.append(cur.name) + cur=cur.parent + lst.reverse() + if lst and Utils.is_win32 and len(lst[0])==2 and lst[0].endswith(':'): + lst[0]=lst[0][0] + return self.ctx.bldnode.make_node(['__root__']+lst) + def find_resource(self,lst): + if isinstance(lst,str): + lst=[x for x in split_path(lst)if x and x!='.'] + node=self.get_bld().search_node(lst) + if not node: + self=self.get_src() + node=self.find_node(lst) + if node: + if os.path.isdir(node.abspath()): + return None + return node + def find_or_declare(self,lst): + if isinstance(lst,str): + lst=[x for x in split_path(lst)if x and x!='.'] + node=self.get_bld().search_node(lst) + if node: + if not os.path.isfile(node.abspath()): + node.sig=None + node.parent.mkdir() + return node + self=self.get_src() + node=self.find_node(lst) + if node: + if not os.path.isfile(node.abspath()): + node.sig=None + node.parent.mkdir() + return node + node=self.get_bld().make_node(lst) + node.parent.mkdir() + return node + def find_dir(self,lst): + if isinstance(lst,str): + lst=[x for x in split_path(lst)if x and x!='.'] + node=self.find_node(lst) + try: + if not os.path.isdir(node.abspath()): + return None + except(OSError,AttributeError): + return None + return node + def change_ext(self,ext,ext_in=None): + name=self.name + if ext_in is None: + k=name.rfind('.') + if k>=0: + name=name[:k]+ext + else: + name=name+ext + else: + name=name[:-len(ext_in)]+ext + return self.parent.find_or_declare([name]) + def nice_path(self,env=None): + return self.path_from(self.ctx.launch_node()) + def bldpath(self): + return self.path_from(self.ctx.bldnode) + def srcpath(self): + return self.path_from(self.ctx.srcnode) + def relpath(self): + cur=self + x=id(self.ctx.bldnode) + while cur.parent: + if id(cur)==x: + return self.bldpath() + cur=cur.parent + return self.srcpath() + def bld_dir(self): + return self.parent.bldpath() + def bld_base(self): + s=os.path.splitext(self.name)[0] + return self.bld_dir()+os.sep+s + def get_bld_sig(self): + try: + return self.cache_sig + except AttributeError: + pass + if not self.is_bld()or self.ctx.bldnode is self.ctx.srcnode: + self.sig=Utils.h_file(self.abspath()) + self.cache_sig=ret=self.sig + return ret + search=search_node +pickle_lock=Utils.threading.Lock() +class Nod3(Node): + pass diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Node.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Node.pyc Binary files differnew file mode 100644 index 0000000..eca5aa5 --- /dev/null +++ 
b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Node.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Options.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Options.py new file mode 100644 index 0000000..21f4254 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Options.py @@ -0,0 +1,135 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,tempfile,optparse,sys,re +from waflib import Logs,Utils,Context +cmds='distclean configure build install clean uninstall check dist distcheck'.split() +options={} +commands=[] +lockfile=os.environ.get('WAFLOCK','.lock-waf_%s_build'%sys.platform) +try:cache_global=os.path.abspath(os.environ['WAFCACHE']) +except KeyError:cache_global='' +platform=Utils.unversioned_sys_platform() +class opt_parser(optparse.OptionParser): + def __init__(self,ctx): + optparse.OptionParser.__init__(self,conflict_handler="resolve",version='waf %s (%s)'%(Context.WAFVERSION,Context.WAFREVISION)) + self.formatter.width=Logs.get_term_cols() + p=self.add_option + self.ctx=ctx + jobs=ctx.jobs() + p('-j','--jobs',dest='jobs',default=jobs,type='int',help='amount of parallel jobs (%r)'%jobs) + p('-k','--keep',dest='keep',default=0,action='count',help='keep running happily even if errors are found') + p('-v','--verbose',dest='verbose',default=0,action='count',help='verbosity level -v -vv or -vvv [default: 0]') + p('--nocache',dest='nocache',default=False,action='store_true',help='ignore the WAFCACHE (if set)') + p('--zones',dest='zones',default='',action='store',help='debugging zones (task_gen, deps, tasks, etc)') + gr=optparse.OptionGroup(self,'configure options') + self.add_option_group(gr) + gr.add_option('-o','--out',action='store',default='',help='build dir for the project',dest='out') + gr.add_option('-t','--top',action='store',default='',help='src dir for the project',dest='top') + default_prefix=os.environ.get('PREFIX') + if not default_prefix: + if platform=='win32': + d=tempfile.gettempdir() + default_prefix=d[0].upper()+d[1:] + else: + default_prefix='/usr/local/' + gr.add_option('--prefix',dest='prefix',default=default_prefix,help='installation prefix [default: %r]'%default_prefix) + gr.add_option('--download',dest='download',default=False,action='store_true',help='try to download the tools if missing') + gr=optparse.OptionGroup(self,'build and install options') + self.add_option_group(gr) + gr.add_option('-p','--progress',dest='progress_bar',default=0,action='count',help='-p: progress bar; -pp: ide output') + gr.add_option('--targets',dest='targets',default='',action='store',help='task generators, e.g. "target1,target2"') + gr=optparse.OptionGroup(self,'step options') + self.add_option_group(gr) + gr.add_option('--files',dest='files',default='',action='store',help='files to process, by regexp, e.g. 
"*/main.c,*/test/main.o"') + default_destdir=os.environ.get('DESTDIR','') + gr=optparse.OptionGroup(self,'install/uninstall options') + self.add_option_group(gr) + gr.add_option('--destdir',help='installation root [default: %r]'%default_destdir,default=default_destdir,dest='destdir') + gr.add_option('-f','--force',dest='force',default=False,action='store_true',help='force file installation') + gr.add_option('--distcheck-args',help='arguments to pass to distcheck',default=None,action='store') + def get_usage(self): + cmds_str={} + for cls in Context.classes: + if not cls.cmd or cls.cmd=='options': + continue + s=cls.__doc__ or'' + cmds_str[cls.cmd]=s + if Context.g_module: + for(k,v)in Context.g_module.__dict__.items(): + if k in['options','init','shutdown']: + continue + if type(v)is type(Context.create_context): + if v.__doc__ and not k.startswith('_'): + cmds_str[k]=v.__doc__ + just=0 + for k in cmds_str: + just=max(just,len(k)) + lst=[' %s: %s'%(k.ljust(just),v)for(k,v)in cmds_str.items()] + lst.sort() + ret='\n'.join(lst) + return'''waf [commands] [options] + +Main commands (example: ./waf build -j4) +%s +'''%ret +class OptionsContext(Context.Context): + cmd='options' + fun='options' + def __init__(self,**kw): + super(OptionsContext,self).__init__(**kw) + self.parser=opt_parser(self) + self.option_groups={} + def jobs(self): + count=int(os.environ.get('JOBS',0)) + if count<1: + if'NUMBER_OF_PROCESSORS'in os.environ: + count=int(os.environ.get('NUMBER_OF_PROCESSORS',1)) + else: + if hasattr(os,'sysconf_names'): + if'SC_NPROCESSORS_ONLN'in os.sysconf_names: + count=int(os.sysconf('SC_NPROCESSORS_ONLN')) + elif'SC_NPROCESSORS_CONF'in os.sysconf_names: + count=int(os.sysconf('SC_NPROCESSORS_CONF')) + if not count and os.name not in('nt','java'): + try: + tmp=self.cmd_and_log(['sysctl','-n','hw.ncpu'],quiet=0) + except Exception: + pass + else: + if re.match('^[0-9]+$',tmp): + count=int(tmp) + if count<1: + count=1 + elif count>1024: + count=1024 + return count + def add_option(self,*k,**kw): + return self.parser.add_option(*k,**kw) + def add_option_group(self,*k,**kw): + try: + gr=self.option_groups[k[0]] + except KeyError: + gr=self.parser.add_option_group(*k,**kw) + self.option_groups[k[0]]=gr + return gr + def get_option_group(self,opt_str): + try: + return self.option_groups[opt_str] + except KeyError: + for group in self.parser.option_groups: + if group.title==opt_str: + return group + return None + def parse_args(self,_args=None): + global options,commands + (options,leftover_args)=self.parser.parse_args(args=_args) + commands=leftover_args + if options.destdir: + options.destdir=os.path.abspath(os.path.expanduser(options.destdir)) + if options.verbose>=1: + self.load('errcheck') + def execute(self): + super(OptionsContext,self).execute() + self.parse_args() diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Options.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Options.pyc Binary files differnew file mode 100644 index 0000000..a5629f0 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Options.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Runner.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Runner.py new file mode 100644 index 0000000..15b6a27 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Runner.py @@ -0,0 +1,197 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import random,atexit +try: + from queue import Queue +except ImportError: + from Queue import Queue +from waflib import Utils,Task,Errors,Logs +GAP=10 +class TaskConsumer(Utils.threading.Thread): + def __init__(self): + Utils.threading.Thread.__init__(self) + self.ready=Queue() + self.setDaemon(1) + self.start() + def run(self): + try: + self.loop() + except Exception: + pass + def loop(self): + while 1: + tsk=self.ready.get() + if not isinstance(tsk,Task.TaskBase): + tsk(self) + else: + tsk.process() +pool=Queue() +def get_pool(): + try: + return pool.get(False) + except Exception: + return TaskConsumer() +def put_pool(x): + pool.put(x) +def _free_resources(): + global pool + lst=[] + while pool.qsize(): + lst.append(pool.get()) + for x in lst: + x.ready.put(None) + for x in lst: + x.join() + pool=None +atexit.register(_free_resources) +class Parallel(object): + def __init__(self,bld,j=2): + self.numjobs=j + self.bld=bld + self.outstanding=[] + self.frozen=[] + self.out=Queue(0) + self.count=0 + self.processed=1 + self.stop=False + self.error=[] + self.biter=None + self.dirty=False + def get_next_task(self): + if not self.outstanding: + return None + return self.outstanding.pop(0) + def postpone(self,tsk): + if random.randint(0,1): + self.frozen.insert(0,tsk) + else: + self.frozen.append(tsk) + def refill_task_list(self): + while self.count>self.numjobs*GAP: + self.get_out() + while not self.outstanding: + if self.count: + self.get_out() + elif self.frozen: + try: + cond=self.deadlock==self.processed + except AttributeError: + pass + else: + if cond: + msg='check the build order for the tasks' + for tsk in self.frozen: + if not tsk.run_after: + msg='check the methods runnable_status' + break + lst=[] + for tsk in self.frozen: + lst.append('%s\t-> %r'%(repr(tsk),[id(x)for x in tsk.run_after])) + raise Errors.WafError('Deadlock detected: %s%s'%(msg,''.join(lst))) + self.deadlock=self.processed + if self.frozen: + self.outstanding+=self.frozen + self.frozen=[] + elif not self.count: + self.outstanding.extend(self.biter.next()) + self.total=self.bld.total() + break + def add_more_tasks(self,tsk): + if getattr(tsk,'more_tasks',None): + self.outstanding+=tsk.more_tasks + self.total+=len(tsk.more_tasks) + def get_out(self): + tsk=self.out.get() + if not self.stop: + self.add_more_tasks(tsk) + self.count-=1 + self.dirty=True + return tsk + def error_handler(self,tsk): + if not self.bld.keep: + self.stop=True + self.error.append(tsk) + def add_task(self,tsk): + try: + self.pool + except AttributeError: + self.init_task_pool() + self.ready.put(tsk) + def init_task_pool(self): + pool=self.pool=[get_pool()for i in range(self.numjobs)] + self.ready=Queue(0) + def setq(consumer): + consumer.ready=self.ready + for x in pool: + x.ready.put(setq) + return pool + def free_task_pool(self): + def setq(consumer): + consumer.ready=Queue(0) + self.out.put(self) + try: + pool=self.pool + except AttributeError: + pass + else: + for x in pool: + self.ready.put(setq) + for x in pool: + self.get_out() + for x in pool: + put_pool(x) + self.pool=[] + def start(self): + self.total=self.bld.total() + while not self.stop: + self.refill_task_list() + tsk=self.get_next_task() + if not tsk: + if self.count: + continue + else: + break + if tsk.hasrun: + self.processed+=1 + continue + if self.stop: + break + try: + st=tsk.runnable_status() + except Exception: + self.processed+=1 + tsk.err_msg=Utils.ex_stack() + if not self.stop and 
self.bld.keep: + tsk.hasrun=Task.SKIPPED + if self.bld.keep==1: + if Logs.verbose>1 or not self.error: + self.error.append(tsk) + self.stop=True + else: + if Logs.verbose>1: + self.error.append(tsk) + continue + tsk.hasrun=Task.EXCEPTION + self.error_handler(tsk) + continue + if st==Task.ASK_LATER: + self.postpone(tsk) + elif st==Task.SKIP_ME: + self.processed+=1 + tsk.hasrun=Task.SKIPPED + self.add_more_tasks(tsk) + else: + tsk.position=(self.processed,self.total) + self.count+=1 + tsk.master=self + self.processed+=1 + if self.numjobs==1: + tsk.process() + else: + self.add_task(tsk) + while self.error and self.count: + self.get_out() + assert(self.count==0 or self.stop) + self.free_task_pool() diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Runner.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Runner.pyc Binary files differnew file mode 100644 index 0000000..d36dc62 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Runner.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Scripting.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Scripting.py new file mode 100644 index 0000000..afa38cb --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Scripting.py @@ -0,0 +1,376 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,shlex,shutil,traceback,errno,sys,stat +from waflib import Utils,Configure,Logs,Options,ConfigSet,Context,Errors,Build,Node +build_dir_override=None +no_climb_commands=['configure'] +default_cmd="build" +def waf_entry_point(current_directory,version,wafdir): + Logs.init_log() + if Context.WAFVERSION!=version: + Logs.error('Waf script %r and library %r do not match (directory %r)'%(version,Context.WAFVERSION,wafdir)) + sys.exit(1) + if'--version'in sys.argv: + Context.run_dir=current_directory + ctx=Context.create_context('options') + ctx.curdir=current_directory + ctx.parse_args() + sys.exit(0) + Context.waf_dir=wafdir + Context.launch_dir=current_directory + no_climb=os.environ.get('NOCLIMB',None) + if not no_climb: + for k in no_climb_commands: + if k in sys.argv: + no_climb=True + break + cur=current_directory + while cur: + lst=os.listdir(cur) + if Options.lockfile in lst: + env=ConfigSet.ConfigSet() + try: + env.load(os.path.join(cur,Options.lockfile)) + ino=os.stat(cur)[stat.ST_INO] + except Exception: + pass + else: + for x in[env.run_dir,env.top_dir,env.out_dir]: + if Utils.is_win32: + if cur==x: + load=True + break + else: + try: + ino2=os.stat(x)[stat.ST_INO] + except OSError: + pass + else: + if ino==ino2: + load=True + break + else: + Logs.warn('invalid lock file in %s'%cur) + load=False + if load: + Context.run_dir=env.run_dir + Context.top_dir=env.top_dir + Context.out_dir=env.out_dir + break + if not Context.run_dir: + if Context.WSCRIPT_FILE in lst: + Context.run_dir=cur + next=os.path.dirname(cur) + if next==cur: + break + cur=next + if no_climb: + break + if not Context.run_dir: + if'-h'in sys.argv or'--help'in sys.argv: + Logs.warn('No wscript file found: the help message may be incomplete') + Context.run_dir=current_directory + ctx=Context.create_context('options') + ctx.curdir=current_directory + ctx.parse_args() + sys.exit(0) + Logs.error('Waf: Run from a directory containing a file named %r'%Context.WSCRIPT_FILE) + sys.exit(1) + try: + os.chdir(Context.run_dir) + except OSError: + Logs.error('Waf: The folder %r is unreadable'%Context.run_dir) + 
sys.exit(1) + try: + set_main_module(Context.run_dir+os.sep+Context.WSCRIPT_FILE) + except Errors.WafError ,e: + Logs.pprint('RED',e.verbose_msg) + Logs.error(str(e)) + sys.exit(1) + except Exception ,e: + Logs.error('Waf: The wscript in %r is unreadable'%Context.run_dir,e) + traceback.print_exc(file=sys.stdout) + sys.exit(2) + try: + run_commands() + except Errors.WafError ,e: + if Logs.verbose>1: + Logs.pprint('RED',e.verbose_msg) + Logs.error(e.msg) + sys.exit(1) + except SystemExit: + raise + except Exception ,e: + traceback.print_exc(file=sys.stdout) + sys.exit(2) + except KeyboardInterrupt: + Logs.pprint('RED','Interrupted') + sys.exit(68) +def set_main_module(file_path): + Context.g_module=Context.load_module(file_path) + Context.g_module.root_path=file_path + def set_def(obj): + name=obj.__name__ + if not name in Context.g_module.__dict__: + setattr(Context.g_module,name,obj) + for k in[update,dist,distclean,distcheck,update]: + set_def(k) + if not'init'in Context.g_module.__dict__: + Context.g_module.init=Utils.nada + if not'shutdown'in Context.g_module.__dict__: + Context.g_module.shutdown=Utils.nada + if not'options'in Context.g_module.__dict__: + Context.g_module.options=Utils.nada +def parse_options(): + Context.create_context('options').execute() + if not Options.commands: + Options.commands=[default_cmd] + Options.commands=[x for x in Options.commands if x!='options'] + Logs.verbose=Options.options.verbose + Logs.init_log() + if Options.options.zones: + Logs.zones=Options.options.zones.split(',') + if not Logs.verbose: + Logs.verbose=1 + elif Logs.verbose>0: + Logs.zones=['runner'] + if Logs.verbose>2: + Logs.zones=['*'] +def run_command(cmd_name): + ctx=Context.create_context(cmd_name) + ctx.log_timer=Utils.Timer() + ctx.options=Options.options + ctx.cmd=cmd_name + ctx.execute() + return ctx +def run_commands(): + parse_options() + run_command('init') + while Options.commands: + cmd_name=Options.commands.pop(0) + ctx=run_command(cmd_name) + Logs.info('%r finished successfully (%s)'%(cmd_name,str(ctx.log_timer))) + run_command('shutdown') +def _can_distclean(name): + for k in'.o .moc .exe'.split(): + if name.endswith(k): + return True + return False +def distclean_dir(dirname): + for(root,dirs,files)in os.walk(dirname): + for f in files: + if _can_distclean(f): + fname=root+os.sep+f + try: + os.remove(fname) + except OSError: + Logs.warn('Could not remove %r'%fname) + for x in[Context.DBFILE,'config.log']: + try: + os.remove(x) + except OSError: + pass + try: + shutil.rmtree('c4che') + except OSError: + pass +def distclean(ctx): + '''removes the build directory''' + lst=os.listdir('.') + for f in lst: + if f==Options.lockfile: + try: + proj=ConfigSet.ConfigSet(f) + except IOError: + Logs.warn('Could not read %r'%f) + continue + if proj['out_dir']!=proj['top_dir']: + try: + shutil.rmtree(proj['out_dir']) + except IOError: + pass + except OSError ,e: + if e.errno!=errno.ENOENT: + Logs.warn('Could not remove %r'%proj['out_dir']) + else: + distclean_dir(proj['out_dir']) + for k in(proj['out_dir'],proj['top_dir'],proj['run_dir']): + p=os.path.join(k,Options.lockfile) + try: + os.remove(p) + except OSError ,e: + if e.errno!=errno.ENOENT: + Logs.warn('Could not remove %r'%p) + if not Options.commands: + for x in'.waf-1. waf-1. .waf3-1. 
waf3-1.'.split(): + if f.startswith(x): + shutil.rmtree(f,ignore_errors=True) +class Dist(Context.Context): + '''creates an archive containing the project source code''' + cmd='dist' + fun='dist' + algo='tar.bz2' + ext_algo={} + def execute(self): + self.recurse([os.path.dirname(Context.g_module.root_path)]) + self.archive() + def archive(self): + import tarfile + arch_name=self.get_arch_name() + try: + self.base_path + except AttributeError: + self.base_path=self.path + node=self.base_path.make_node(arch_name) + try: + node.delete() + except Exception: + pass + files=self.get_files() + if self.algo.startswith('tar.'): + tar=tarfile.open(arch_name,'w:'+self.algo.replace('tar.','')) + for x in files: + self.add_tar_file(x,tar) + tar.close() + elif self.algo=='zip': + import zipfile + zip=zipfile.ZipFile(arch_name,'w',compression=zipfile.ZIP_DEFLATED) + for x in files: + archive_name=self.get_base_name()+'/'+x.path_from(self.base_path) + zip.write(x.abspath(),archive_name,zipfile.ZIP_DEFLATED) + zip.close() + else: + self.fatal('Valid algo types are tar.bz2, tar.gz or zip') + try: + from hashlib import sha1 as sha + except ImportError: + from sha import sha + try: + digest=" (sha=%r)"%sha(node.read()).hexdigest() + except Exception: + digest='' + Logs.info('New archive created: %s%s'%(self.arch_name,digest)) + def get_tar_path(self,node): + return node.abspath() + def add_tar_file(self,x,tar): + p=self.get_tar_path(x) + tinfo=tar.gettarinfo(name=p,arcname=self.get_tar_prefix()+'/'+x.path_from(self.base_path)) + tinfo.uid=0 + tinfo.gid=0 + tinfo.uname='root' + tinfo.gname='root' + fu=None + try: + fu=open(p,'rb') + tar.addfile(tinfo,fileobj=fu) + finally: + if fu: + fu.close() + def get_tar_prefix(self): + try: + return self.tar_prefix + except AttributeError: + return self.get_base_name() + def get_arch_name(self): + try: + self.arch_name + except AttributeError: + self.arch_name=self.get_base_name()+'.'+self.ext_algo.get(self.algo,self.algo) + return self.arch_name + def get_base_name(self): + try: + self.base_name + except AttributeError: + appname=getattr(Context.g_module,Context.APPNAME,'noname') + version=getattr(Context.g_module,Context.VERSION,'1.0') + self.base_name=appname+'-'+version + return self.base_name + def get_excl(self): + try: + return self.excl + except AttributeError: + self.excl=Node.exclude_regs+' **/waf-1.7.* **/.waf-1.7* **/waf3-1.7.* **/.waf3-1.7* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*' + nd=self.root.find_node(Context.out_dir) + if nd: + self.excl+=' '+nd.path_from(self.base_path) + return self.excl + def get_files(self): + try: + files=self.files + except AttributeError: + files=self.base_path.ant_glob('**/*',excl=self.get_excl()) + return files +def dist(ctx): + '''makes a tarball for redistributing the sources''' + pass +class DistCheck(Dist): + fun='distcheck' + cmd='distcheck' + def execute(self): + self.recurse([os.path.dirname(Context.g_module.root_path)]) + self.archive() + self.check() + def check(self): + import tempfile,tarfile + t=None + try: + t=tarfile.open(self.get_arch_name()) + for x in t: + t.extract(x) + finally: + if t: + t.close() + cfg=[] + if Options.options.distcheck_args: + cfg=shlex.split(Options.options.distcheck_args) + else: + cfg=[x for x in sys.argv if x.startswith('-')] + instdir=tempfile.mkdtemp('.inst',self.get_base_name()) + ret=Utils.subprocess.Popen([sys.executable,sys.argv[0],'configure','install','uninstall','--destdir='+instdir]+cfg,cwd=self.get_base_name()).wait() + if ret: + raise 
Errors.WafError('distcheck failed with code %i'%ret) + if os.path.exists(instdir): + raise Errors.WafError('distcheck succeeded, but files were left in %s'%instdir) + shutil.rmtree(self.get_base_name()) +def distcheck(ctx): + '''checks if the project compiles (tarball from 'dist')''' + pass +def update(ctx): + '''updates the plugins from the *waflib/extras* directory''' + lst=Options.options.files.split(',') + if not lst: + lst=[x for x in Utils.listdir(Context.waf_dir+'/waflib/extras')if x.endswith('.py')] + for x in lst: + tool=x.replace('.py','') + try: + Configure.download_tool(tool,force=True,ctx=ctx) + except Errors.WafError: + Logs.error('Could not find the tool %s in the remote repository'%x) +def autoconfigure(execute_method): + def execute(self): + if not Configure.autoconfig: + return execute_method(self) + env=ConfigSet.ConfigSet() + do_config=False + try: + env.load(os.path.join(Context.top_dir,Options.lockfile)) + except Exception: + Logs.warn('Configuring the project') + do_config=True + else: + if env.run_dir!=Context.run_dir: + do_config=True + else: + h=0 + for f in env['files']: + h=Utils.h_list((h,Utils.readf(f,'rb'))) + do_config=h!=env.hash + if do_config: + Options.commands.insert(0,self.cmd) + Options.commands.insert(0,'configure') + return + return execute_method(self) + return execute +Build.BuildContext.execute=autoconfigure(Build.BuildContext.execute) diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Scripting.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Scripting.pyc Binary files differnew file mode 100644 index 0000000..bcdad69 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Scripting.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Task.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Task.py new file mode 100644 index 0000000..8cc4cc3 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Task.py @@ -0,0 +1,683 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,shutil,re,tempfile +from waflib import Utils,Logs,Errors +NOT_RUN=0 +MISSING=1 +CRASHED=2 +EXCEPTION=3 +SKIPPED=8 +SUCCESS=9 +ASK_LATER=-1 +SKIP_ME=-2 +RUN_ME=-3 +COMPILE_TEMPLATE_SHELL=''' +def f(tsk): + env = tsk.env + gen = tsk.generator + bld = gen.bld + wd = getattr(tsk, 'cwd', None) + p = env.get_flat + tsk.last_cmd = cmd = \'\'\' %s \'\'\' % s + return tsk.exec_command(cmd, cwd=wd, env=env.env or None) +''' +COMPILE_TEMPLATE_NOSHELL=''' +def f(tsk): + env = tsk.env + gen = tsk.generator + bld = gen.bld + wd = getattr(tsk, 'cwd', None) + def to_list(xx): + if isinstance(xx, str): return [xx] + return xx + tsk.last_cmd = lst = [] + %s + lst = [x for x in lst if x] + return tsk.exec_command(lst, cwd=wd, env=env.env or None) +''' +def cache_outputs(cls): + m1=cls.run + def run(self): + bld=self.generator.bld + if bld.cache_global and not bld.nocache: + if self.can_retrieve_cache(): + return 0 + return m1(self) + cls.run=run + m2=cls.post_run + def post_run(self): + bld=self.generator.bld + ret=m2(self) + if bld.cache_global and not bld.nocache: + self.put_files_cache() + return ret + cls.post_run=post_run + return cls +classes={} +class store_task_type(type): + def __init__(cls,name,bases,dict): + super(store_task_type,cls).__init__(name,bases,dict) + name=cls.__name__ + if name.endswith('_task'): + name=name.replace('_task','') + if name!='evil'and name!='TaskBase': + global classes + if getattr(cls,'run_str',None): + (f,dvars)=compile_fun(cls.run_str,cls.shell) + cls.hcode=cls.run_str + cls.run_str=None + cls.run=f + cls.vars=list(set(cls.vars+dvars)) + cls.vars.sort() + elif getattr(cls,'run',None)and not'hcode'in cls.__dict__: + cls.hcode=Utils.h_fun(cls.run) + if not getattr(cls,'nocache',None): + cls=cache_outputs(cls) + getattr(cls,'register',classes)[name]=cls +evil=store_task_type('evil',(object,),{}) +class TaskBase(evil): + color='GREEN' + ext_in=[] + ext_out=[] + before=[] + after=[] + hcode='' + def __init__(self,*k,**kw): + self.hasrun=NOT_RUN + try: + self.generator=kw['generator'] + except KeyError: + self.generator=self + def __repr__(self): + return'\n\t{task %r: %s %s}'%(self.__class__.__name__,id(self),str(getattr(self,'fun',''))) + def __str__(self): + if hasattr(self,'fun'): + return'executing: %s\n'%self.fun.__name__ + return self.__class__.__name__+'\n' + def __hash__(self): + return id(self) + def exec_command(self,cmd,**kw): + bld=self.generator.bld + try: + if not kw.get('cwd',None): + kw['cwd']=bld.cwd + except AttributeError: + bld.cwd=kw['cwd']=bld.variant_dir + return bld.exec_command(cmd,**kw) + def runnable_status(self): + return RUN_ME + def process(self): + m=self.master + if m.stop: + m.out.put(self) + return + try: + del self.generator.bld.task_sigs[self.uid()] + except KeyError: + pass + try: + self.generator.bld.returned_tasks.append(self) + self.log_display(self.generator.bld) + ret=self.run() + except Exception: + self.err_msg=Utils.ex_stack() + self.hasrun=EXCEPTION + m.error_handler(self) + m.out.put(self) + return + if ret: + self.err_code=ret + self.hasrun=CRASHED + else: + try: + self.post_run() + except Errors.WafError: + pass + except Exception: + self.err_msg=Utils.ex_stack() + self.hasrun=EXCEPTION + else: + self.hasrun=SUCCESS + if self.hasrun!=SUCCESS: + m.error_handler(self) + m.out.put(self) + def run(self): + if hasattr(self,'fun'): + return self.fun(self) + return 0 + def post_run(self): + pass + def log_display(self,bld): + 
bld.to_log(self.display()) + def display(self): + col1=Logs.colors(self.color) + col2=Logs.colors.NORMAL + master=self.master + def cur(): + tmp=-1 + if hasattr(master,'ready'): + tmp-=master.ready.qsize() + return master.processed+tmp + if self.generator.bld.progress_bar==1: + return self.generator.bld.progress_line(cur(),master.total,col1,col2) + if self.generator.bld.progress_bar==2: + ela=str(self.generator.bld.timer) + try: + ins=','.join([n.name for n in self.inputs]) + except AttributeError: + ins='' + try: + outs=','.join([n.name for n in self.outputs]) + except AttributeError: + outs='' + return'|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n'%(master.total,cur(),ins,outs,ela) + s=str(self) + if not s: + return None + total=master.total + n=len(str(total)) + fs='[%%%dd/%%%dd] %%s%%s%%s'%(n,n) + return fs%(cur(),total,col1,s,col2) + def attr(self,att,default=None): + ret=getattr(self,att,self) + if ret is self:return getattr(self.__class__,att,default) + return ret + def hash_constraints(self): + cls=self.__class__ + tup=(str(cls.before),str(cls.after),str(cls.ext_in),str(cls.ext_out),cls.__name__,cls.hcode) + h=hash(tup) + return h + def format_error(self): + msg=getattr(self,'last_cmd','') + name=getattr(self.generator,'name','') + if getattr(self,"err_msg",None): + return self.err_msg + elif not self.hasrun: + return'task in %r was not executed for some reason: %r'%(name,self) + elif self.hasrun==CRASHED: + try: + return' -> task in %r failed (exit status %r): %r\n%r'%(name,self.err_code,self,msg) + except AttributeError: + return' -> task in %r failed: %r\n%r'%(name,self,msg) + elif self.hasrun==MISSING: + return' -> missing files in %r: %r\n%r'%(name,self,msg) + else: + return'invalid status for task in %r: %r'%(name,self.hasrun) + def colon(self,var1,var2): + tmp=self.env[var1] + if isinstance(var2,str): + it=self.env[var2] + else: + it=var2 + if isinstance(tmp,str): + return[tmp%x for x in it] + else: + if Logs.verbose and not tmp and it: + Logs.warn('Missing env variable %r for task %r (generator %r)'%(var1,self,self.generator)) + lst=[] + for y in it: + lst.extend(tmp) + lst.append(y) + return lst +class Task(TaskBase): + vars=[] + shell=False + def __init__(self,*k,**kw): + TaskBase.__init__(self,*k,**kw) + self.env=kw['env'] + self.inputs=[] + self.outputs=[] + self.dep_nodes=[] + self.run_after=set([]) + def __str__(self): + env=self.env + src_str=' '.join([a.nice_path()for a in self.inputs]) + tgt_str=' '.join([a.nice_path()for a in self.outputs]) + if self.outputs:sep=' -> ' + else:sep='' + return'%s: %s%s%s\n'%(self.__class__.__name__.replace('_task',''),src_str,sep,tgt_str) + def __repr__(self): + try: + ins=",".join([x.name for x in self.inputs]) + outs=",".join([x.name for x in self.outputs]) + except AttributeError: + ins=",".join([str(x)for x in self.inputs]) + outs=",".join([str(x)for x in self.outputs]) + return"".join(['\n\t{task %r: '%id(self),self.__class__.__name__," ",ins," -> ",outs,'}']) + def uid(self): + try: + return self.uid_ + except AttributeError: + m=Utils.md5() + up=m.update + up(self.__class__.__name__) + for x in self.inputs+self.outputs: + up(x.abspath()) + self.uid_=m.digest() + return self.uid_ + def set_inputs(self,inp): + if isinstance(inp,list):self.inputs+=inp + else:self.inputs.append(inp) + def set_outputs(self,out): + if isinstance(out,list):self.outputs+=out + else:self.outputs.append(out) + def set_run_after(self,task): + assert isinstance(task,TaskBase) + self.run_after.add(task) + def signature(self): + try:return 
self.cache_sig + except AttributeError:pass + self.m=Utils.md5() + self.m.update(self.hcode) + self.sig_explicit_deps() + self.sig_vars() + if self.scan: + try: + self.sig_implicit_deps() + except Errors.TaskRescan: + return self.signature() + ret=self.cache_sig=self.m.digest() + return ret + def runnable_status(self): + for t in self.run_after: + if not t.hasrun: + return ASK_LATER + bld=self.generator.bld + try: + new_sig=self.signature() + except Errors.TaskNotReady: + return ASK_LATER + key=self.uid() + try: + prev_sig=bld.task_sigs[key] + except KeyError: + Logs.debug("task: task %r must run as it was never run before or the task code changed"%self) + return RUN_ME + for node in self.outputs: + try: + if node.sig!=new_sig: + return RUN_ME + except AttributeError: + Logs.debug("task: task %r must run as the output nodes do not exist"%self) + return RUN_ME + if new_sig!=prev_sig: + return RUN_ME + return SKIP_ME + def post_run(self): + bld=self.generator.bld + sig=self.signature() + for node in self.outputs: + try: + os.stat(node.abspath()) + except OSError: + self.hasrun=MISSING + self.err_msg='-> missing file: %r'%node.abspath() + raise Errors.WafError(self.err_msg) + node.sig=sig + bld.task_sigs[self.uid()]=self.cache_sig + def sig_explicit_deps(self): + bld=self.generator.bld + upd=self.m.update + for x in self.inputs+self.dep_nodes: + try: + upd(x.get_bld_sig()) + except(AttributeError,TypeError): + raise Errors.WafError('Missing node signature for %r (required by %r)'%(x,self)) + if bld.deps_man: + additional_deps=bld.deps_man + for x in self.inputs+self.outputs: + try: + d=additional_deps[id(x)] + except KeyError: + continue + for v in d: + if isinstance(v,bld.root.__class__): + try: + v=v.get_bld_sig() + except AttributeError: + raise Errors.WafError('Missing node signature for %r (required by %r)'%(v,self)) + elif hasattr(v,'__call__'): + v=v() + upd(v) + return self.m.digest() + def sig_vars(self): + bld=self.generator.bld + env=self.env + upd=self.m.update + act_sig=bld.hash_env_vars(env,self.__class__.vars) + upd(act_sig) + dep_vars=getattr(self,'dep_vars',None) + if dep_vars: + upd(bld.hash_env_vars(env,dep_vars)) + return self.m.digest() + scan=None + def sig_implicit_deps(self): + bld=self.generator.bld + key=self.uid() + prev=bld.task_sigs.get((key,'imp'),[]) + if prev: + try: + if prev==self.compute_sig_implicit_deps(): + return prev + except Exception: + for x in bld.node_deps.get(self.uid(),[]): + if x.is_child_of(bld.srcnode): + try: + os.stat(x.abspath()) + except OSError: + try: + del x.parent.children[x.name] + except KeyError: + pass + del bld.task_sigs[(key,'imp')] + raise Errors.TaskRescan('rescan') + (nodes,names)=self.scan() + if Logs.verbose: + Logs.debug('deps: scanner for %s returned %s %s'%(str(self),str(nodes),str(names))) + bld.node_deps[key]=nodes + bld.raw_deps[key]=names + self.are_implicit_nodes_ready() + try: + bld.task_sigs[(key,'imp')]=sig=self.compute_sig_implicit_deps() + except Exception: + if Logs.verbose: + for k in bld.node_deps.get(self.uid(),[]): + try: + k.get_bld_sig() + except Exception: + Logs.warn('Missing signature for node %r (may cause rebuilds)'%k) + else: + return sig + def compute_sig_implicit_deps(self): + upd=self.m.update + bld=self.generator.bld + self.are_implicit_nodes_ready() + for k in bld.node_deps.get(self.uid(),[]): + upd(k.get_bld_sig()) + return self.m.digest() + def are_implicit_nodes_ready(self): + bld=self.generator.bld + try: + cache=bld.dct_implicit_nodes + except AttributeError: + 
bld.dct_implicit_nodes=cache={} + try: + dct=cache[bld.cur] + except KeyError: + dct=cache[bld.cur]={} + for tsk in bld.cur_tasks: + for x in tsk.outputs: + dct[x]=tsk + modified=False + for x in bld.node_deps.get(self.uid(),[]): + if x in dct: + self.run_after.add(dct[x]) + modified=True + if modified: + for tsk in self.run_after: + if not tsk.hasrun: + raise Errors.TaskNotReady('not ready') + def can_retrieve_cache(self): + if not getattr(self,'outputs',None): + return None + sig=self.signature() + ssig=Utils.to_hex(self.uid())+Utils.to_hex(sig) + dname=os.path.join(self.generator.bld.cache_global,ssig) + try: + t1=os.stat(dname).st_mtime + except OSError: + return None + for node in self.outputs: + orig=os.path.join(dname,node.name) + try: + shutil.copy2(orig,node.abspath()) + os.utime(orig,None) + except(OSError,IOError): + Logs.debug('task: failed retrieving file') + return None + try: + t2=os.stat(dname).st_mtime + except OSError: + return None + if t1!=t2: + return None + for node in self.outputs: + node.sig=sig + if self.generator.bld.progress_bar<1: + self.generator.bld.to_log('restoring from cache %r\n'%node.abspath()) + self.cached=True + return True + def put_files_cache(self): + if getattr(self,'cached',None): + return None + if not getattr(self,'outputs',None): + return None + sig=self.signature() + ssig=Utils.to_hex(self.uid())+Utils.to_hex(sig) + dname=os.path.join(self.generator.bld.cache_global,ssig) + tmpdir=tempfile.mkdtemp(prefix=self.generator.bld.cache_global+os.sep+'waf') + try: + shutil.rmtree(dname) + except Exception: + pass + try: + for node in self.outputs: + dest=os.path.join(tmpdir,node.name) + shutil.copy2(node.abspath(),dest) + except(OSError,IOError): + try: + shutil.rmtree(tmpdir) + except Exception: + pass + else: + try: + os.rename(tmpdir,dname) + except OSError: + try: + shutil.rmtree(tmpdir) + except Exception: + pass + else: + try: + os.chmod(dname,Utils.O755) + except Exception: + pass +def is_before(t1,t2): + to_list=Utils.to_list + for k in to_list(t2.ext_in): + if k in to_list(t1.ext_out): + return 1 + if t1.__class__.__name__ in to_list(t2.after): + return 1 + if t2.__class__.__name__ in to_list(t1.before): + return 1 + return 0 +def set_file_constraints(tasks): + ins=Utils.defaultdict(set) + outs=Utils.defaultdict(set) + for x in tasks: + for a in getattr(x,'inputs',[])+getattr(x,'dep_nodes',[]): + ins[id(a)].add(x) + for a in getattr(x,'outputs',[]): + outs[id(a)].add(x) + links=set(ins.keys()).intersection(outs.keys()) + for k in links: + for a in ins[k]: + a.run_after.update(outs[k]) +def set_precedence_constraints(tasks): + cstr_groups=Utils.defaultdict(list) + for x in tasks: + h=x.hash_constraints() + cstr_groups[h].append(x) + keys=list(cstr_groups.keys()) + maxi=len(keys) + for i in range(maxi): + t1=cstr_groups[keys[i]][0] + for j in range(i+1,maxi): + t2=cstr_groups[keys[j]][0] + if is_before(t1,t2): + a=i + b=j + elif is_before(t2,t1): + a=j + b=i + else: + continue + aval=set(cstr_groups[keys[a]]) + for x in cstr_groups[keys[b]]: + x.run_after.update(aval) +def funex(c): + dc={} + exec(c,dc) + return dc['f'] +reg_act=re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})",re.M) +def compile_fun_shell(line): + extr=[] + def repl(match): + g=match.group + if g('dollar'):return"$" + elif g('backslash'):return'\\\\' + elif g('subst'):extr.append((g('var'),g('code')));return"%s" + return None + line=reg_act.sub(repl,line)or line + parm=[] + dvars=[] + app=parm.append + for(var,meth)in extr: + if 
var=='SRC': + if meth:app('tsk.inputs%s'%meth) + else:app('" ".join([a.path_from(bld.bldnode) for a in tsk.inputs])') + elif var=='TGT': + if meth:app('tsk.outputs%s'%meth) + else:app('" ".join([a.path_from(bld.bldnode) for a in tsk.outputs])') + elif meth: + if meth.startswith(':'): + m=meth[1:] + if m=='SRC': + m='[a.path_from(bld.bldnode) for a in tsk.inputs]' + elif m=='TGT': + m='[a.path_from(bld.bldnode) for a in tsk.outputs]' + elif m[:3]not in('tsk','gen','bld'): + dvars.extend([var,meth[1:]]) + m='%r'%m + app('" ".join(tsk.colon(%r, %s))'%(var,m)) + else: + app('%s%s'%(var,meth)) + else: + if not var in dvars:dvars.append(var) + app("p('%s')"%var) + if parm:parm="%% (%s) "%(',\n\t\t'.join(parm)) + else:parm='' + c=COMPILE_TEMPLATE_SHELL%(line,parm) + Logs.debug('action: %s'%c.strip().splitlines()) + return(funex(c),dvars) +def compile_fun_noshell(line): + extr=[] + def repl(match): + g=match.group + if g('dollar'):return"$" + elif g('subst'):extr.append((g('var'),g('code')));return"<<|@|>>" + return None + line2=reg_act.sub(repl,line) + params=line2.split('<<|@|>>') + assert(extr) + buf=[] + dvars=[] + app=buf.append + for x in range(len(extr)): + params[x]=params[x].strip() + if params[x]: + app("lst.extend(%r)"%params[x].split()) + (var,meth)=extr[x] + if var=='SRC': + if meth:app('lst.append(tsk.inputs%s)'%meth) + else:app("lst.extend([a.path_from(bld.bldnode) for a in tsk.inputs])") + elif var=='TGT': + if meth:app('lst.append(tsk.outputs%s)'%meth) + else:app("lst.extend([a.path_from(bld.bldnode) for a in tsk.outputs])") + elif meth: + if meth.startswith(':'): + m=meth[1:] + if m=='SRC': + m='[a.path_from(bld.bldnode) for a in tsk.inputs]' + elif m=='TGT': + m='[a.path_from(bld.bldnode) for a in tsk.outputs]' + elif m[:3]not in('tsk','gen','bld'): + dvars.extend([var,m]) + m='%r'%m + app('lst.extend(tsk.colon(%r, %s))'%(var,m)) + else: + app('lst.extend(gen.to_list(%s%s))'%(var,meth)) + else: + app('lst.extend(to_list(env[%r]))'%var) + if not var in dvars:dvars.append(var) + if extr: + if params[-1]: + app("lst.extend(%r)"%params[-1].split()) + fun=COMPILE_TEMPLATE_NOSHELL%"\n\t".join(buf) + Logs.debug('action: %s'%fun.strip().splitlines()) + return(funex(fun),dvars) +def compile_fun(line,shell=False): + if line.find('<')>0 or line.find('>')>0 or line.find('&&')>0: + shell=True + if shell: + return compile_fun_shell(line) + else: + return compile_fun_noshell(line) +def task_factory(name,func=None,vars=None,color='GREEN',ext_in=[],ext_out=[],before=[],after=[],shell=False,scan=None): + params={'vars':vars or[],'color':color,'name':name,'ext_in':Utils.to_list(ext_in),'ext_out':Utils.to_list(ext_out),'before':Utils.to_list(before),'after':Utils.to_list(after),'shell':shell,'scan':scan,} + if isinstance(func,str): + params['run_str']=func + else: + params['run']=func + cls=type(Task)(name,(Task,),params) + global classes + classes[name]=cls + return cls +def always_run(cls): + old=cls.runnable_status + def always(self): + ret=old(self) + if ret==SKIP_ME: + ret=RUN_ME + return ret + cls.runnable_status=always + return cls +def update_outputs(cls): + old_post_run=cls.post_run + def post_run(self): + old_post_run(self) + for node in self.outputs: + node.sig=Utils.h_file(node.abspath()) + self.generator.bld.task_sigs[node.abspath()]=self.uid() + cls.post_run=post_run + old_runnable_status=cls.runnable_status + def runnable_status(self): + status=old_runnable_status(self) + if status!=RUN_ME: + return status + try: + bld=self.generator.bld + prev_sig=bld.task_sigs[self.uid()] + if 
prev_sig==self.signature(): + for x in self.outputs: + if not x.is_child_of(bld.bldnode): + x.sig=Utils.h_file(x.abspath()) + if not x.sig or bld.task_sigs[x.abspath()]!=self.uid(): + return RUN_ME + return SKIP_ME + except OSError: + pass + except IOError: + pass + except KeyError: + pass + except IndexError: + pass + except AttributeError: + pass + return RUN_ME + cls.runnable_status=runnable_status + return cls diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Task.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Task.pyc Binary files differnew file mode 100644 index 0000000..09ae716 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Task.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/TaskGen.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/TaskGen.py new file mode 100644 index 0000000..30c05c1 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/TaskGen.py @@ -0,0 +1,405 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import copy,re,os +from waflib import Task,Utils,Logs,Errors,ConfigSet,Node +feats=Utils.defaultdict(set) +class task_gen(object): + mappings={} + prec=Utils.defaultdict(list) + def __init__(self,*k,**kw): + self.source='' + self.target='' + self.meths=[] + self.prec=Utils.defaultdict(list) + self.mappings={} + self.features=[] + self.tasks=[] + if not'bld'in kw: + self.env=ConfigSet.ConfigSet() + self.idx=0 + self.path=None + else: + self.bld=kw['bld'] + self.env=self.bld.env.derive() + self.path=self.bld.path + try: + self.idx=self.bld.idx[id(self.path)]=self.bld.idx.get(id(self.path),0)+1 + except AttributeError: + self.bld.idx={} + self.idx=self.bld.idx[id(self.path)]=1 + for key,val in kw.items(): + setattr(self,key,val) + def __str__(self): + return"<task_gen %r declared in %s>"%(self.name,self.path.abspath()) + def __repr__(self): + lst=[] + for x in self.__dict__.keys(): + if x not in['env','bld','compiled_tasks','tasks']: + lst.append("%s=%s"%(x,repr(getattr(self,x)))) + return"bld(%s) in %s"%(", ".join(lst),self.path.abspath()) + def get_name(self): + try: + return self._name + except AttributeError: + if isinstance(self.target,list): + lst=[str(x)for x in self.target] + name=self._name=','.join(lst) + else: + name=self._name=str(self.target) + return name + def set_name(self,name): + self._name=name + name=property(get_name,set_name) + def to_list(self,val): + if isinstance(val,str):return val.split() + else:return val + def post(self): + if getattr(self,'posted',None): + return False + self.posted=True + keys=set(self.meths) + self.features=Utils.to_list(self.features) + for x in self.features+['*']: + st=feats[x] + if not st: + if not x in Task.classes: + Logs.warn('feature %r does not exist - bind at least one method to it'%x) + keys.update(list(st)) + prec={} + prec_tbl=self.prec or task_gen.prec + for x in prec_tbl: + if x in keys: + prec[x]=prec_tbl[x] + tmp=[] + for a in keys: + for x in prec.values(): + if a in x:break + else: + tmp.append(a) + tmp.sort() + out=[] + while tmp: + e=tmp.pop() + if e in keys:out.append(e) + try: + nlst=prec[e] + except KeyError: + pass + else: + del prec[e] + for x in nlst: + for y in prec: + if x in prec[y]: + break + else: + tmp.append(x) + if prec: + raise Errors.WafError('Cycle detected in the method execution %r'%prec) + out.reverse() + self.meths=out + Logs.debug('task_gen: posting %s %d'%(self,id(self))) + for x in out: 
+ try: + v=getattr(self,x) + except AttributeError: + raise Errors.WafError('%r is not a valid task generator method'%x) + Logs.debug('task_gen: -> %s (%d)'%(x,id(self))) + v() + Logs.debug('task_gen: posted %s'%self.name) + return True + def get_hook(self,node): + name=node.name + for k in self.mappings: + if name.endswith(k): + return self.mappings[k] + for k in task_gen.mappings: + if name.endswith(k): + return task_gen.mappings[k] + raise Errors.WafError("File %r has no mapping in %r (did you forget to load a waf tool?)"%(node,task_gen.mappings.keys())) + def create_task(self,name,src=None,tgt=None): + task=Task.classes[name](env=self.env.derive(),generator=self) + if src: + task.set_inputs(src) + if tgt: + task.set_outputs(tgt) + self.tasks.append(task) + return task + def clone(self,env): + newobj=self.bld() + for x in self.__dict__: + if x in['env','bld']: + continue + elif x in['path','features']: + setattr(newobj,x,getattr(self,x)) + else: + setattr(newobj,x,copy.copy(getattr(self,x))) + newobj.posted=False + if isinstance(env,str): + newobj.env=self.bld.all_envs[env].derive() + else: + newobj.env=env.derive() + return newobj +def declare_chain(name='',rule=None,reentrant=None,color='BLUE',ext_in=[],ext_out=[],before=[],after=[],decider=None,scan=None,install_path=None,shell=False): + ext_in=Utils.to_list(ext_in) + ext_out=Utils.to_list(ext_out) + if not name: + name=rule + cls=Task.task_factory(name,rule,color=color,ext_in=ext_in,ext_out=ext_out,before=before,after=after,scan=scan,shell=shell) + def x_file(self,node): + ext=decider and decider(self,node)or cls.ext_out + if ext_in: + _ext_in=ext_in[0] + tsk=self.create_task(name,node) + cnt=0 + keys=list(self.mappings.keys())+list(self.__class__.mappings.keys()) + for x in ext: + k=node.change_ext(x,ext_in=_ext_in) + tsk.outputs.append(k) + if reentrant!=None: + if cnt<int(reentrant): + self.source.append(k) + else: + for y in keys: + if k.name.endswith(y): + self.source.append(k) + break + cnt+=1 + if install_path: + self.bld.install_files(install_path,tsk.outputs) + return tsk + for x in cls.ext_in: + task_gen.mappings[x]=x_file + return x_file +def taskgen_method(func): + setattr(task_gen,func.__name__,func) + return func +def feature(*k): + def deco(func): + setattr(task_gen,func.__name__,func) + for name in k: + feats[name].update([func.__name__]) + return func + return deco +def before_method(*k): + def deco(func): + setattr(task_gen,func.__name__,func) + for fun_name in k: + if not func.__name__ in task_gen.prec[fun_name]: + task_gen.prec[fun_name].append(func.__name__) + return func + return deco +before=before_method +def after_method(*k): + def deco(func): + setattr(task_gen,func.__name__,func) + for fun_name in k: + if not fun_name in task_gen.prec[func.__name__]: + task_gen.prec[func.__name__].append(fun_name) + return func + return deco +after=after_method +def extension(*k): + def deco(func): + setattr(task_gen,func.__name__,func) + for x in k: + task_gen.mappings[x]=func + return func + return deco +@taskgen_method +def to_nodes(self,lst,path=None): + tmp=[] + path=path or self.path + find=path.find_resource + if isinstance(lst,self.path.__class__): + lst=[lst] + for x in Utils.to_list(lst): + if isinstance(x,str): + node=find(x) + else: + node=x + if not node: + raise Errors.WafError("source not found: %r in %r"%(x,self)) + tmp.append(node) + return tmp +@feature('*') +def process_source(self): + self.source=self.to_nodes(getattr(self,'source',[])) + for node in self.source: + self.get_hook(node)(self,node) 
+@feature('*') +@before_method('process_source') +def process_rule(self): + if not getattr(self,'rule',None): + return + name=str(getattr(self,'name',None)or self.target or getattr(self.rule,'__name__',self.rule)) + try: + cache=self.bld.cache_rule_attr + except AttributeError: + cache=self.bld.cache_rule_attr={} + cls=None + if getattr(self,'cache_rule','True'): + try: + cls=cache[(name,self.rule)] + except KeyError: + pass + if not cls: + cls=Task.task_factory(name,self.rule,getattr(self,'vars',[]),shell=getattr(self,'shell',True),color=getattr(self,'color','BLUE'),scan=getattr(self,'scan',None)) + if getattr(self,'scan',None): + cls.scan=self.scan + elif getattr(self,'deps',None): + def scan(self): + nodes=[] + for x in self.generator.to_list(getattr(self.generator,'deps',None)): + node=self.generator.path.find_resource(x) + if not node: + self.generator.bld.fatal('Could not find %r (was it declared?)'%x) + nodes.append(node) + return[nodes,[]] + cls.scan=scan + if getattr(self,'update_outputs',None): + Task.update_outputs(cls) + if getattr(self,'always',None): + Task.always_run(cls) + for x in['after','before','ext_in','ext_out']: + setattr(cls,x,getattr(self,x,[])) + if getattr(self,'cache_rule','True'): + cache[(name,self.rule)]=cls + tsk=self.create_task(name) + if getattr(self,'target',None): + if isinstance(self.target,str): + self.target=self.target.split() + if not isinstance(self.target,list): + self.target=[self.target] + for x in self.target: + if isinstance(x,str): + tsk.outputs.append(self.path.find_or_declare(x)) + else: + x.parent.mkdir() + tsk.outputs.append(x) + if getattr(self,'install_path',None): + self.bld.install_files(self.install_path,tsk.outputs) + if getattr(self,'source',None): + tsk.inputs=self.to_nodes(self.source) + self.source=[] + if getattr(self,'cwd',None): + tsk.cwd=self.cwd +@feature('seq') +def sequence_order(self): + if self.meths and self.meths[-1]!='sequence_order': + self.meths.append('sequence_order') + return + if getattr(self,'seq_start',None): + return + if getattr(self.bld,'prev',None): + self.bld.prev.post() + for x in self.bld.prev.tasks: + for y in self.tasks: + y.set_run_after(x) + self.bld.prev=self +re_m4=re.compile('@(\w+)@',re.M) +class subst_pc(Task.Task): + def run(self): + if getattr(self.generator,'is_copy',None): + self.outputs[0].write(self.inputs[0].read('rb'),'wb') + if getattr(self.generator,'chmod',None): + os.chmod(self.outputs[0].abspath(),self.generator.chmod) + return None + if getattr(self.generator,'fun',None): + return self.generator.fun(self) + code=self.inputs[0].read(encoding=getattr(self.generator,'encoding','ISO8859-1')) + if getattr(self.generator,'subst_fun',None): + code=self.generator.subst_fun(self,code) + if code is not None: + self.outputs[0].write(code,encoding=getattr(self.generator,'encoding','ISO8859-1')) + return + code=code.replace('%','%%') + lst=[] + def repl(match): + g=match.group + if g(1): + lst.append(g(1)) + return"%%(%s)s"%g(1) + return'' + global re_m4 + code=getattr(self.generator,'re_m4',re_m4).sub(repl,code) + try: + d=self.generator.dct + except AttributeError: + d={} + for x in lst: + tmp=getattr(self.generator,x,'')or self.env.get_flat(x)or self.env.get_flat(x.upper()) + d[x]=str(tmp) + code=code%d + self.outputs[0].write(code,encoding=getattr(self.generator,'encoding','ISO8859-1')) + self.generator.bld.raw_deps[self.uid()]=self.dep_vars=lst + try:delattr(self,'cache_sig') + except AttributeError:pass + if getattr(self.generator,'chmod',None): + 
os.chmod(self.outputs[0].abspath(),self.generator.chmod) + def sig_vars(self): + bld=self.generator.bld + env=self.env + upd=self.m.update + if getattr(self.generator,'fun',None): + upd(Utils.h_fun(self.generator.fun)) + if getattr(self.generator,'subst_fun',None): + upd(Utils.h_fun(self.generator.subst_fun)) + vars=self.generator.bld.raw_deps.get(self.uid(),[]) + act_sig=bld.hash_env_vars(env,vars) + upd(act_sig) + lst=[getattr(self.generator,x,'')for x in vars] + upd(Utils.h_list(lst)) + return self.m.digest() +@extension('.pc.in') +def add_pcfile(self,node): + tsk=self.create_task('subst_pc',node,node.change_ext('.pc','.pc.in')) + self.bld.install_files(getattr(self,'install_path','${LIBDIR}/pkgconfig/'),tsk.outputs) +class subst(subst_pc): + pass +@feature('subst') +@before_method('process_source','process_rule') +def process_subst(self): + src=Utils.to_list(getattr(self,'source',[])) + if isinstance(src,Node.Node): + src=[src] + tgt=Utils.to_list(getattr(self,'target',[])) + if isinstance(tgt,Node.Node): + tgt=[tgt] + if len(src)!=len(tgt): + raise Errors.WafError('invalid number of source/target for %r'%self) + for x,y in zip(src,tgt): + if not x or not y: + raise Errors.WafError('null source or target for %r'%self) + a,b=None,None + if isinstance(x,str)and isinstance(y,str)and x==y: + a=self.path.find_node(x) + b=self.path.get_bld().make_node(y) + if not os.path.isfile(b.abspath()): + b.sig=None + b.parent.mkdir() + else: + if isinstance(x,str): + a=self.path.find_resource(x) + elif isinstance(x,Node.Node): + a=x + if isinstance(y,str): + b=self.path.find_or_declare(y) + elif isinstance(y,Node.Node): + b=y + if not a: + raise Errors.WafError('cound not find %r for %r'%(x,self)) + has_constraints=False + tsk=self.create_task('subst',a,b) + for k in('after','before','ext_in','ext_out'): + val=getattr(self,k,None) + if val: + has_constraints=True + setattr(tsk,k,val) + if not has_constraints and b.name.endswith('.h'): + tsk.before=[k for k in('c','cxx')if k in Task.classes] + inst_to=getattr(self,'install_path',None) + if inst_to: + self.bld.install_files(inst_to,b,chmod=getattr(self,'chmod',Utils.O644)) + self.source=[] diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/TaskGen.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/TaskGen.pyc Binary files differnew file mode 100644 index 0000000..4d4c750 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/TaskGen.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__init__.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__init__.py new file mode 100644 index 0000000..efeed79 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__init__.py @@ -0,0 +1,4 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__init__.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__init__.pyc Binary files differnew file mode 100644 index 0000000..d963650 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__init__.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ar.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ar.py new file mode 100644 index 0000000..7a16dfe --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ar.py @@ -0,0 +1,11 @@ +#! 
/usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib.Configure import conf +@conf +def find_ar(conf): + conf.load('ar') +def configure(conf): + conf.find_program('ar',var='AR') + conf.env.ARFLAGS='rcs' diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ar.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ar.pyc Binary files differnew file mode 100644 index 0000000..dd21f60 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ar.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/asm.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/asm.py new file mode 100644 index 0000000..b9ed5f4 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/asm.py @@ -0,0 +1,25 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys +from waflib import Task,Utils +import waflib.Task +from waflib.Tools.ccroot import link_task,stlink_task +from waflib.TaskGen import extension,feature +class asm(Task.Task): + color='BLUE' + run_str='${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}' +@extension('.s','.S','.asm','.ASM','.spp','.SPP') +def asm_hook(self,node): + return self.create_compiled_task('asm',node) +class asmprogram(link_task): + run_str='${ASLINK} ${ASLINKFLAGS} ${ASLNK_TGT_F}${TGT} ${ASLNK_SRC_F}${SRC}' + ext_out=['.bin'] + inst_to='${BINDIR}' +class asmshlib(asmprogram): + inst_to='${LIBDIR}' +class asmstlib(stlink_task): + pass +def configure(conf): + conf.env['ASMPATH_ST']='-I%s' diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/bison.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/bison.py new file mode 100644 index 0000000..6ae7898 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/bison.py @@ -0,0 +1,28 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib import Task +from waflib.TaskGen import extension +class bison(Task.Task): + color='BLUE' + run_str='${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}' + ext_out=['.h'] +@extension('.y','.yc','.yy') +def big_bison(self,node): + has_h='-d'in self.env['BISONFLAGS'] + outs=[] + if node.name.endswith('.yc'): + outs.append(node.change_ext('.tab.cc')) + if has_h: + outs.append(node.change_ext('.tab.hh')) + else: + outs.append(node.change_ext('.tab.c')) + if has_h: + outs.append(node.change_ext('.tab.h')) + tsk=self.create_task('bison',node,outs) + tsk.cwd=node.parent.get_bld().abspath() + self.source.append(outs[0]) +def configure(conf): + conf.find_program('bison',var='BISON') + conf.env.BISONFLAGS=['-d'] diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c.py new file mode 100644 index 0000000..4d8cbd5 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c.py @@ -0,0 +1,24 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib import TaskGen,Task,Utils +from waflib.Tools import c_preproc +from waflib.Tools.ccroot import link_task,stlink_task +@TaskGen.extension('.c') +def c_hook(self,node): + return self.create_compiled_task('c',node) +class c(Task.Task): + run_str='${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT}' + vars=['CCDEPS'] + ext_in=['.h'] + scan=c_preproc.scan +class cprogram(link_task): + run_str='${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB}' + ext_out=['.bin'] + vars=['LINKDEPS'] + inst_to='${BINDIR}' +class cshlib(cprogram): + inst_to='${LIBDIR}' +class cstlib(stlink_task): + pass diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c.pyc Binary files differnew file mode 100644 index 0000000..4e9f76c --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_aliases.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_aliases.py new file mode 100644 index 0000000..a3a2bb9 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_aliases.py @@ -0,0 +1,55 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys,re +from waflib import Utils,Build +from waflib.Configure import conf +def get_extensions(lst): + ret=[] + for x in Utils.to_list(lst): + try: + if not isinstance(x,str): + x=x.name + ret.append(x[x.rfind('.')+1:]) + except Exception: + pass + return ret +def sniff_features(**kw): + exts=get_extensions(kw['source']) + type=kw['_type'] + feats=[] + if'cxx'in exts or'cpp'in exts or'c++'in exts or'cc'in exts or'C'in exts: + feats.append('cxx') + if'c'in exts or'vala'in exts: + feats.append('c') + if'd'in exts: + feats.append('d') + if'java'in exts: + feats.append('java') + if'java'in exts: + return'java' + if type in['program','shlib','stlib']: + for x in feats: + if x in['cxx','d','c']: + feats.append(x+type) + return feats +def set_features(kw,_type): + kw['_type']=_type + kw['features']=Utils.to_list(kw.get('features',[]))+Utils.to_list(sniff_features(**kw)) +@conf +def program(bld,*k,**kw): + set_features(kw,'program') + return bld(*k,**kw) +@conf +def shlib(bld,*k,**kw): + set_features(kw,'shlib') + return bld(*k,**kw) +@conf +def stlib(bld,*k,**kw): + set_features(kw,'stlib') + return bld(*k,**kw) +@conf +def objects(bld,*k,**kw): + set_features(kw,'objects') + return bld(*k,**kw) diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_aliases.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_aliases.pyc Binary files differnew file mode 100644 index 0000000..257d3a5 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_aliases.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_config.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_config.py new file mode 100755 index 0000000..e1c8e73 --- /dev/null +++ 
b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_config.py @@ -0,0 +1,751 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,re,shlex,sys +from waflib import Build,Utils,Task,Options,Logs,Errors,ConfigSet,Runner +from waflib.TaskGen import after_method,feature +from waflib.Configure import conf +WAF_CONFIG_H='config.h' +DEFKEYS='define_key' +INCKEYS='include_key' +cfg_ver={'atleast-version':'>=','exact-version':'==','max-version':'<=',} +SNIP_FUNCTION=''' +int main(int argc, char **argv) { + void *p; + (void)argc; (void)argv; + p=(void*)(%s); + return 0; +} +''' +SNIP_TYPE=''' +int main(int argc, char **argv) { + (void)argc; (void)argv; + if ((%(type_name)s *) 0) return 0; + if (sizeof (%(type_name)s)) return 0; + return 1; +} +''' +SNIP_EMPTY_PROGRAM=''' +int main(int argc, char **argv) { + (void)argc; (void)argv; + return 0; +} +''' +SNIP_FIELD=''' +int main(int argc, char **argv) { + char *off; + (void)argc; (void)argv; + off = (char*) &((%(type_name)s*)0)->%(field_name)s; + return (size_t) off < sizeof(%(type_name)s); +} +''' +MACRO_TO_DESTOS={'__linux__':'linux','__GNU__':'gnu','__FreeBSD__':'freebsd','__NetBSD__':'netbsd','__OpenBSD__':'openbsd','__sun':'sunos','__hpux':'hpux','__sgi':'irix','_AIX':'aix','__CYGWIN__':'cygwin','__MSYS__':'msys','_UWIN':'uwin','_WIN64':'win32','_WIN32':'win32','__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__':'darwin','__ENVIRONMENT_IPHONE_OS_VERSION_MIN_REQUIRED__':'darwin','__QNX__':'qnx','__native_client__':'nacl'} +MACRO_TO_DEST_CPU={'__x86_64__':'x86_64','__amd64__':'x86_64','__i386__':'x86','__ia64__':'ia','__mips__':'mips','__sparc__':'sparc','__alpha__':'alpha','__aarch64__':'aarch64','__thumb__':'thumb','__arm__':'arm','__hppa__':'hppa','__powerpc__':'powerpc','__ppc__':'powerpc','__convex__':'convex','__m68k__':'m68k','__s390x__':'s390x','__s390__':'s390','__sh__':'sh',} +@conf +def parse_flags(self,line,uselib_store,env=None,force_static=False): + assert(isinstance(line,str)) + env=env or self.env + app=env.append_value + appu=env.append_unique + lex=shlex.shlex(line,posix=False) + lex.whitespace_split=True + lex.commenters='' + lst=list(lex) + uselib=uselib_store + while lst: + x=lst.pop(0) + st=x[:2] + ot=x[2:] + if st=='-I'or st=='/I': + if not ot:ot=lst.pop(0) + appu('INCLUDES_'+uselib,[ot]) + elif st=='-include': + tmp=[x,lst.pop(0)] + app('CFLAGS',tmp) + app('CXXFLAGS',tmp) + elif st=='-D'or(env.CXX_NAME=='msvc'and st=='/D'): + if not ot:ot=lst.pop(0) + app('DEFINES_'+uselib,[ot]) + elif st=='-l': + if not ot:ot=lst.pop(0) + prefix=force_static and'STLIB_'or'LIB_' + appu(prefix+uselib,[ot]) + elif st=='-L': + if not ot:ot=lst.pop(0) + appu('LIBPATH_'+uselib,[ot]) + elif x.startswith('/LIBPATH:'): + appu('LIBPATH_'+uselib,[x.replace('/LIBPATH:','')]) + elif x=='-pthread'or x.startswith('+')or x.startswith('-std'): + app('CFLAGS_'+uselib,[x]) + app('CXXFLAGS_'+uselib,[x]) + app('LINKFLAGS_'+uselib,[x]) + elif x=='-framework': + appu('FRAMEWORK_'+uselib,[lst.pop(0)]) + elif x.startswith('-F'): + appu('FRAMEWORKPATH_'+uselib,[x[2:]]) + elif x.startswith('-Wl'): + app('LINKFLAGS_'+uselib,[x]) + elif x.startswith('-m')or x.startswith('-f')or x.startswith('-dynamic'): + app('CFLAGS_'+uselib,[x]) + app('CXXFLAGS_'+uselib,[x]) + elif x.startswith('-bundle'): + app('LINKFLAGS_'+uselib,[x]) + elif x.startswith('-undefined'): + arg=lst.pop(0) + app('LINKFLAGS_'+uselib,[x,arg]) + elif x.startswith('-arch')or 
x.startswith('-isysroot'): + tmp=[x,lst.pop(0)] + app('CFLAGS_'+uselib,tmp) + app('CXXFLAGS_'+uselib,tmp) + app('LINKFLAGS_'+uselib,tmp) + elif x.endswith('.a')or x.endswith('.so')or x.endswith('.dylib')or x.endswith('.lib'): + appu('LINKFLAGS_'+uselib,[x]) +@conf +def ret_msg(self,f,kw): + if isinstance(f,str): + return f + return f(kw) +@conf +def validate_cfg(self,kw): + if not'path'in kw: + if not self.env.PKGCONFIG: + self.find_program('pkg-config',var='PKGCONFIG') + kw['path']=self.env.PKGCONFIG + if'atleast_pkgconfig_version'in kw: + if not'msg'in kw: + kw['msg']='Checking for pkg-config version >= %r'%kw['atleast_pkgconfig_version'] + return + if not'okmsg'in kw: + kw['okmsg']='yes' + if not'errmsg'in kw: + kw['errmsg']='not found' + if'modversion'in kw: + if not'msg'in kw: + kw['msg']='Checking for %r version'%kw['modversion'] + return + for x in cfg_ver.keys(): + y=x.replace('-','_') + if y in kw: + if not'package'in kw: + raise ValueError('%s requires a package'%x) + if not'msg'in kw: + kw['msg']='Checking for %r %s %s'%(kw['package'],cfg_ver[x],kw[y]) + return + if not'msg'in kw: + kw['msg']='Checking for %r'%(kw['package']or kw['path']) +@conf +def exec_cfg(self,kw): + def define_it(): + self.define(self.have_define(kw.get('uselib_store',kw['package'])),1,0) + if'atleast_pkgconfig_version'in kw: + cmd=[kw['path'],'--atleast-pkgconfig-version=%s'%kw['atleast_pkgconfig_version']] + self.cmd_and_log(cmd) + if not'okmsg'in kw: + kw['okmsg']='yes' + return + for x in cfg_ver: + y=x.replace('-','_') + if y in kw: + self.cmd_and_log([kw['path'],'--%s=%s'%(x,kw[y]),kw['package']]) + if not'okmsg'in kw: + kw['okmsg']='yes' + define_it() + break + if'modversion'in kw: + version=self.cmd_and_log([kw['path'],'--modversion',kw['modversion']]).strip() + self.define('%s_VERSION'%Utils.quote_define_name(kw.get('uselib_store',kw['modversion'])),version) + return version + lst=[kw['path']] + defi=kw.get('define_variable',None) + if not defi: + defi=self.env.PKG_CONFIG_DEFINES or{} + for key,val in defi.items(): + lst.append('--define-variable=%s=%s'%(key,val)) + static=False + if'args'in kw: + args=Utils.to_list(kw['args']) + if'--static'in args or'--static-libs'in args: + static=True + lst+=args + lst.extend(Utils.to_list(kw['package'])) + if'variables'in kw: + env=kw.get('env',self.env) + uselib=kw.get('uselib_store',kw['package'].upper()) + vars=Utils.to_list(kw['variables']) + for v in vars: + val=self.cmd_and_log(lst+['--variable='+v]).strip() + var='%s_%s'%(uselib,v) + env[var]=val + if not'okmsg'in kw: + kw['okmsg']='yes' + return + ret=self.cmd_and_log(lst) + if not'okmsg'in kw: + kw['okmsg']='yes' + define_it() + self.parse_flags(ret,kw.get('uselib_store',kw['package'].upper()),kw.get('env',self.env),force_static=static) + return ret +@conf +def check_cfg(self,*k,**kw): + if k: + lst=k[0].split() + kw['package']=lst[0] + kw['args']=' '.join(lst[1:]) + self.validate_cfg(kw) + if'msg'in kw: + self.start_msg(kw['msg']) + ret=None + try: + ret=self.exec_cfg(kw) + except self.errors.WafError: + if'errmsg'in kw: + self.end_msg(kw['errmsg'],'YELLOW') + if Logs.verbose>1: + raise + else: + self.fatal('The configuration failed') + else: + if not ret: + ret=True + kw['success']=ret + if'okmsg'in kw: + self.end_msg(self.ret_msg(kw['okmsg'],kw)) + return ret +@conf +def validate_c(self,kw): + if not'env'in kw: + kw['env']=self.env.derive() + env=kw['env'] + if not'compiler'in kw and not'features'in kw: + kw['compiler']='c' + if env['CXX_NAME']and Task.classes.get('cxx',None): + 
kw['compiler']='cxx' + if not self.env['CXX']: + self.fatal('a c++ compiler is required') + else: + if not self.env['CC']: + self.fatal('a c compiler is required') + if not'compile_mode'in kw: + kw['compile_mode']='c' + if'cxx'in Utils.to_list(kw.get('features',[]))or kw.get('compiler','')=='cxx': + kw['compile_mode']='cxx' + if not'type'in kw: + kw['type']='cprogram' + if not'features'in kw: + kw['features']=[kw['compile_mode'],kw['type']] + else: + kw['features']=Utils.to_list(kw['features']) + if not'compile_filename'in kw: + kw['compile_filename']='test.c'+((kw['compile_mode']=='cxx')and'pp'or'') + def to_header(dct): + if'header_name'in dct: + dct=Utils.to_list(dct['header_name']) + return''.join(['#include <%s>\n'%x for x in dct]) + return'' + if'framework_name'in kw: + fwkname=kw['framework_name'] + if not'uselib_store'in kw: + kw['uselib_store']=fwkname.upper() + if not kw.get('no_header',False): + if not'header_name'in kw: + kw['header_name']=[] + fwk='%s/%s.h'%(fwkname,fwkname) + if kw.get('remove_dot_h',None): + fwk=fwk[:-2] + kw['header_name']=Utils.to_list(kw['header_name'])+[fwk] + kw['msg']='Checking for framework %s'%fwkname + kw['framework']=fwkname + if'function_name'in kw: + fu=kw['function_name'] + if not'msg'in kw: + kw['msg']='Checking for function %s'%fu + kw['code']=to_header(kw)+SNIP_FUNCTION%fu + if not'uselib_store'in kw: + kw['uselib_store']=fu.upper() + if not'define_name'in kw: + kw['define_name']=self.have_define(fu) + elif'type_name'in kw: + tu=kw['type_name'] + if not'header_name'in kw: + kw['header_name']='stdint.h' + if'field_name'in kw: + field=kw['field_name'] + kw['code']=to_header(kw)+SNIP_FIELD%{'type_name':tu,'field_name':field} + if not'msg'in kw: + kw['msg']='Checking for field %s in %s'%(field,tu) + if not'define_name'in kw: + kw['define_name']=self.have_define((tu+'_'+field).upper()) + else: + kw['code']=to_header(kw)+SNIP_TYPE%{'type_name':tu} + if not'msg'in kw: + kw['msg']='Checking for type %s'%tu + if not'define_name'in kw: + kw['define_name']=self.have_define(tu.upper()) + elif'header_name'in kw: + if not'msg'in kw: + kw['msg']='Checking for header %s'%kw['header_name'] + l=Utils.to_list(kw['header_name']) + assert len(l)>0,'list of headers in header_name is empty' + kw['code']=to_header(kw)+SNIP_EMPTY_PROGRAM + if not'uselib_store'in kw: + kw['uselib_store']=l[0].upper() + if not'define_name'in kw: + kw['define_name']=self.have_define(l[0]) + if'lib'in kw: + if not'msg'in kw: + kw['msg']='Checking for library %s'%kw['lib'] + if not'uselib_store'in kw: + kw['uselib_store']=kw['lib'].upper() + if'stlib'in kw: + if not'msg'in kw: + kw['msg']='Checking for static library %s'%kw['stlib'] + if not'uselib_store'in kw: + kw['uselib_store']=kw['stlib'].upper() + if'fragment'in kw: + kw['code']=kw['fragment'] + if not'msg'in kw: + kw['msg']='Checking for code snippet' + if not'errmsg'in kw: + kw['errmsg']='no' + for(flagsname,flagstype)in[('cxxflags','compiler'),('cflags','compiler'),('linkflags','linker')]: + if flagsname in kw: + if not'msg'in kw: + kw['msg']='Checking for %s flags %s'%(flagstype,kw[flagsname]) + if not'errmsg'in kw: + kw['errmsg']='no' + if not'execute'in kw: + kw['execute']=False + if kw['execute']: + kw['features'].append('test_exec') + if not'errmsg'in kw: + kw['errmsg']='not found' + if not'okmsg'in kw: + kw['okmsg']='yes' + if not'code'in kw: + kw['code']=SNIP_EMPTY_PROGRAM + if self.env[INCKEYS]: + kw['code']='\n'.join(['#include <%s>'%x for x in self.env[INCKEYS]])+'\n'+kw['code'] + if not 
kw.get('success'):kw['success']=None + if'define_name'in kw: + self.undefine(kw['define_name']) + if not'msg'in kw: + self.fatal('missing "msg" in conf.check(...)') +@conf +def post_check(self,*k,**kw): + is_success=0 + if kw['execute']: + if kw['success']is not None: + if kw.get('define_ret',False): + is_success=kw['success'] + else: + is_success=(kw['success']==0) + else: + is_success=(kw['success']==0) + if'define_name'in kw: + if'header_name'in kw or'function_name'in kw or'type_name'in kw or'fragment'in kw: + if kw['execute']and kw.get('define_ret',None)and isinstance(is_success,str): + self.define(kw['define_name'],is_success,quote=kw.get('quote',1)) + else: + self.define_cond(kw['define_name'],is_success) + else: + self.define_cond(kw['define_name'],is_success) + if'header_name'in kw: + if kw.get('auto_add_header_name',False): + self.env.append_value(INCKEYS,Utils.to_list(kw['header_name'])) + if is_success and'uselib_store'in kw: + from waflib.Tools import ccroot + _vars=set([]) + for x in kw['features']: + if x in ccroot.USELIB_VARS: + _vars|=ccroot.USELIB_VARS[x] + for k in _vars: + lk=k.lower() + if lk in kw: + val=kw[lk] + if isinstance(val,str): + val=val.rstrip(os.path.sep) + self.env.append_unique(k+'_'+kw['uselib_store'],Utils.to_list(val)) + return is_success +@conf +def check(self,*k,**kw): + self.validate_c(kw) + self.start_msg(kw['msg']) + ret=None + try: + ret=self.run_c_code(*k,**kw) + except self.errors.ConfigurationError: + self.end_msg(kw['errmsg'],'YELLOW') + if Logs.verbose>1: + raise + else: + self.fatal('The configuration failed') + else: + kw['success']=ret + ret=self.post_check(*k,**kw) + if not ret: + self.end_msg(kw['errmsg'],'YELLOW') + self.fatal('The configuration failed %r'%ret) + else: + self.end_msg(self.ret_msg(kw['okmsg'],kw)) + return ret +class test_exec(Task.Task): + color='PINK' + def run(self): + if getattr(self.generator,'rpath',None): + if getattr(self.generator,'define_ret',False): + self.generator.bld.retval=self.generator.bld.cmd_and_log([self.inputs[0].abspath()]) + else: + self.generator.bld.retval=self.generator.bld.exec_command([self.inputs[0].abspath()]) + else: + env=self.env.env or{} + env.update(dict(os.environ)) + for var in('LD_LIBRARY_PATH','DYLD_LIBRARY_PATH','PATH'): + env[var]=self.inputs[0].parent.abspath()+os.path.pathsep+env.get(var,'') + if getattr(self.generator,'define_ret',False): + self.generator.bld.retval=self.generator.bld.cmd_and_log([self.inputs[0].abspath()],env=env) + else: + self.generator.bld.retval=self.generator.bld.exec_command([self.inputs[0].abspath()],env=env) +@feature('test_exec') +@after_method('apply_link') +def test_exec_fun(self): + self.create_task('test_exec',self.link_task.outputs[0]) +CACHE_RESULTS=1 +COMPILE_ERRORS=2 +@conf +def run_c_code(self,*k,**kw): + lst=[str(v)for(p,v)in kw.items()if p!='env'] + h=Utils.h_list(lst) + dir=self.bldnode.abspath()+os.sep+(not Utils.is_win32 and'.'or'')+'conf_check_'+Utils.to_hex(h) + try: + os.makedirs(dir) + except OSError: + pass + try: + os.stat(dir) + except OSError: + self.fatal('cannot use the configuration test folder %r'%dir) + cachemode=getattr(Options.options,'confcache',None) + if cachemode==CACHE_RESULTS: + try: + proj=ConfigSet.ConfigSet(os.path.join(dir,'cache_run_c_code')) + except OSError: + pass + else: + ret=proj['cache_run_c_code'] + if isinstance(ret,str)and ret.startswith('Test does not build'): + self.fatal(ret) + return ret + bdir=os.path.join(dir,'testbuild') + if not os.path.exists(bdir): + os.makedirs(bdir) + 
self.test_bld=bld=Build.BuildContext(top_dir=dir,out_dir=bdir) + bld.init_dirs() + bld.progress_bar=0 + bld.targets='*' + if kw['compile_filename']: + node=bld.srcnode.make_node(kw['compile_filename']) + node.write(kw['code']) + bld.logger=self.logger + bld.all_envs.update(self.all_envs) + bld.env=kw['env'] + o=bld(features=kw['features'],source=kw['compile_filename'],target='testprog') + for k,v in kw.items(): + setattr(o,k,v) + self.to_log("==>\n%s\n<=="%kw['code']) + bld.targets='*' + ret=-1 + try: + try: + bld.compile() + except Errors.WafError: + ret='Test does not build: %s'%Utils.ex_stack() + self.fatal(ret) + else: + ret=getattr(bld,'retval',0) + finally: + proj=ConfigSet.ConfigSet() + proj['cache_run_c_code']=ret + proj.store(os.path.join(dir,'cache_run_c_code')) + return ret +@conf +def check_cxx(self,*k,**kw): + kw['compiler']='cxx' + return self.check(*k,**kw) +@conf +def check_cc(self,*k,**kw): + kw['compiler']='c' + return self.check(*k,**kw) +@conf +def define(self,key,val,quote=True): + assert key and isinstance(key,str) + if val is True: + val=1 + elif val in(False,None): + val=0 + if isinstance(val,int)or isinstance(val,float): + s='%s=%s' + else: + s=quote and'%s="%s"'or'%s=%s' + app=s%(key,str(val)) + ban=key+'=' + lst=self.env['DEFINES'] + for x in lst: + if x.startswith(ban): + lst[lst.index(x)]=app + break + else: + self.env.append_value('DEFINES',app) + self.env.append_unique(DEFKEYS,key) +@conf +def undefine(self,key): + assert key and isinstance(key,str) + ban=key+'=' + lst=[x for x in self.env['DEFINES']if not x.startswith(ban)] + self.env['DEFINES']=lst + self.env.append_unique(DEFKEYS,key) +@conf +def define_cond(self,key,val): + assert key and isinstance(key,str) + if val: + self.define(key,1) + else: + self.undefine(key) +@conf +def is_defined(self,key): + assert key and isinstance(key,str) + ban=key+'=' + for x in self.env['DEFINES']: + if x.startswith(ban): + return True + return False +@conf +def get_define(self,key): + assert key and isinstance(key,str) + ban=key+'=' + for x in self.env['DEFINES']: + if x.startswith(ban): + return x[len(ban):] + return None +@conf +def have_define(self,key): + return(self.env.HAVE_PAT or'HAVE_%s')%Utils.quote_define_name(key) +@conf +def write_config_header(self,configfile='',guard='',top=False,env=None,defines=True,headers=False,remove=True,define_prefix=''): + if env: + Logs.warn('Cannot pass env to write_config_header') + if not configfile:configfile=WAF_CONFIG_H + waf_guard=guard or'W_%s_WAF'%Utils.quote_define_name(configfile) + node=top and self.bldnode or self.path.get_bld() + node=node.make_node(configfile) + node.parent.mkdir() + lst=['/* WARNING! All changes made to this file will be lost! 
*/\n'] + lst.append('#ifndef %s\n#define %s\n'%(waf_guard,waf_guard)) + lst.append(self.get_config_header(defines,headers,define_prefix=define_prefix)) + lst.append('\n#endif /* %s */\n'%waf_guard) + node.write('\n'.join(lst)) + self.env.append_unique(Build.CFG_FILES,[node.abspath()]) + if remove: + for key in self.env[DEFKEYS]: + self.undefine(key) + self.env[DEFKEYS]=[] +@conf +def get_config_header(self,defines=True,headers=False,define_prefix=''): + lst=[] + if headers: + for x in self.env[INCKEYS]: + lst.append('#include <%s>'%x) + if defines: + for x in self.env[DEFKEYS]: + if self.is_defined(x): + val=self.get_define(x) + lst.append('#define %s %s'%(define_prefix+x,val)) + else: + lst.append('/* #undef %s */'%(define_prefix+x)) + return"\n".join(lst) +@conf +def cc_add_flags(conf): + conf.add_os_flags('CPPFLAGS','CFLAGS') + conf.add_os_flags('CFLAGS') +@conf +def cxx_add_flags(conf): + conf.add_os_flags('CPPFLAGS','CXXFLAGS') + conf.add_os_flags('CXXFLAGS') +@conf +def link_add_flags(conf): + conf.add_os_flags('LINKFLAGS') + conf.add_os_flags('LDFLAGS','LINKFLAGS') +@conf +def cc_load_tools(conf): + if not conf.env.DEST_OS: + conf.env.DEST_OS=Utils.unversioned_sys_platform() + conf.load('c') +@conf +def cxx_load_tools(conf): + if not conf.env.DEST_OS: + conf.env.DEST_OS=Utils.unversioned_sys_platform() + conf.load('cxx') +@conf +def get_cc_version(conf,cc,gcc=False,icc=False): + cmd=cc+['-dM','-E','-'] + env=conf.env.env or None + try: + p=Utils.subprocess.Popen(cmd,stdin=Utils.subprocess.PIPE,stdout=Utils.subprocess.PIPE,stderr=Utils.subprocess.PIPE,env=env) + p.stdin.write('\n') + out=p.communicate()[0] + except Exception: + conf.fatal('Could not determine the compiler version %r'%cmd) + if not isinstance(out,str): + out=out.decode(sys.stdout.encoding or'iso8859-1') + if gcc: + if out.find('__INTEL_COMPILER')>=0: + conf.fatal('The intel compiler pretends to be gcc') + if out.find('__GNUC__')<0 and out.find('__clang__')<0: + conf.fatal('Could not determine the compiler type') + if icc and out.find('__INTEL_COMPILER')<0: + conf.fatal('Not icc/icpc') + k={} + if icc or gcc: + out=out.splitlines() + for line in out: + lst=shlex.split(line) + if len(lst)>2: + key=lst[1] + val=lst[2] + k[key]=val + def isD(var): + return var in k + def isT(var): + return var in k and k[var]!='0' + if not conf.env.DEST_OS: + conf.env.DEST_OS='' + for i in MACRO_TO_DESTOS: + if isD(i): + conf.env.DEST_OS=MACRO_TO_DESTOS[i] + break + else: + if isD('__APPLE__')and isD('__MACH__'): + conf.env.DEST_OS='darwin' + elif isD('__unix__'): + conf.env.DEST_OS='generic' + if isD('__ELF__'): + conf.env.DEST_BINFMT='elf' + elif isD('__WINNT__')or isD('__CYGWIN__')or isD('_WIN32'): + conf.env.DEST_BINFMT='pe' + conf.env.LIBDIR=conf.env.BINDIR + elif isD('__APPLE__'): + conf.env.DEST_BINFMT='mac-o' + if not conf.env.DEST_BINFMT: + conf.env.DEST_BINFMT=Utils.destos_to_binfmt(conf.env.DEST_OS) + for i in MACRO_TO_DEST_CPU: + if isD(i): + conf.env.DEST_CPU=MACRO_TO_DEST_CPU[i] + break + Logs.debug('ccroot: dest platform: '+' '.join([conf.env[x]or'?'for x in('DEST_OS','DEST_BINFMT','DEST_CPU')])) + if icc: + ver=k['__INTEL_COMPILER'] + conf.env['CC_VERSION']=(ver[:-2],ver[-2],ver[-1]) + else: + if isD('__clang__'): + conf.env['CC_VERSION']=(k['__clang_major__'],k['__clang_minor__'],k['__clang_patchlevel__']) + else: + try: + conf.env['CC_VERSION']=(k['__GNUC__'],k['__GNUC_MINOR__'],k['__GNUC_PATCHLEVEL__']) + except KeyError: + conf.env['CC_VERSION']=(k['__GNUC__'],k['__GNUC_MINOR__'],0) + return k +@conf +def 
get_xlc_version(conf,cc): + cmd=cc+['-qversion'] + try: + out,err=conf.cmd_and_log(cmd,output=0) + except Errors.WafError: + conf.fatal('Could not find xlc %r'%cmd) + for v in(r"IBM XL C/C\+\+.* V(?P<major>\d*)\.(?P<minor>\d*)",): + version_re=re.compile(v,re.I).search + match=version_re(out or err) + if match: + k=match.groupdict() + conf.env['CC_VERSION']=(k['major'],k['minor']) + break + else: + conf.fatal('Could not determine the XLC version.') +@conf +def get_suncc_version(conf,cc): + cmd=cc+['-V'] + try: + out,err=conf.cmd_and_log(cmd,output=0) + except Errors.WafError ,e: + if not(hasattr(e,'returncode')and hasattr(e,'stdout')and hasattr(e,'stderr')): + conf.fatal('Could not find suncc %r'%cmd) + out=e.stdout + err=e.stderr + version=(out or err) + version=version.split('\n')[0] + version_re=re.compile(r'cc:\s+sun\s+(c\+\+|c)\s+(?P<major>\d*)\.(?P<minor>\d*)',re.I).search + match=version_re(version) + if match: + k=match.groupdict() + conf.env['CC_VERSION']=(k['major'],k['minor']) + else: + conf.fatal('Could not determine the suncc version.') +@conf +def add_as_needed(self): + if self.env.DEST_BINFMT=='elf'and'gcc'in(self.env.CXX_NAME,self.env.CC_NAME): + self.env.append_unique('LINKFLAGS','--as-needed') +class cfgtask(Task.TaskBase): + def display(self): + return'' + def runnable_status(self): + return Task.RUN_ME + def uid(self): + return Utils.SIG_NIL + def run(self): + conf=self.conf + bld=Build.BuildContext(top_dir=conf.srcnode.abspath(),out_dir=conf.bldnode.abspath()) + bld.env=conf.env + bld.init_dirs() + bld.in_msg=1 + bld.logger=self.logger + try: + bld.check(**self.args) + except Exception: + return 1 +@conf +def multicheck(self,*k,**kw): + self.start_msg(kw.get('msg','Executing %d configuration tests'%len(k))) + class par(object): + def __init__(self): + self.keep=False + self.cache_global=Options.cache_global + self.nocache=Options.options.nocache + self.returned_tasks=[] + self.task_sigs={} + def total(self): + return len(tasks) + def to_log(self,*k,**kw): + return + bld=par() + tasks=[] + for dct in k: + x=cfgtask(bld=bld) + tasks.append(x) + x.args=dct + x.bld=bld + x.conf=self + x.args=dct + x.logger=Logs.make_mem_logger(str(id(x)),self.logger) + def it(): + yield tasks + while 1: + yield[] + p=Runner.Parallel(bld,Options.options.jobs) + p.biter=it() + p.start() + for x in tasks: + x.logger.memhandler.flush() + for x in tasks: + if x.hasrun!=Task.SUCCESS: + self.end_msg(kw.get('errmsg','no'),color='YELLOW') + self.fatal(kw.get('fatalmsg',None)or'One of the tests has failed, see the config.log for more information') + self.end_msg('ok') diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_config.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_config.pyc Binary files differnew file mode 100644 index 0000000..af8787e --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_config.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_osx.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_osx.py new file mode 100644 index 0000000..579b2a7 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_osx.py @@ -0,0 +1,120 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,shutil,sys,platform +from waflib import TaskGen,Task,Build,Options,Utils,Errors +from waflib.TaskGen import taskgen_method,feature,after_method,before_method +app_info=''' +<?xml version="1.0" encoding="UTF-8"?> +<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd"> +<plist version="0.9"> +<dict> + <key>CFBundlePackageType</key> + <string>APPL</string> + <key>CFBundleGetInfoString</key> + <string>Created by Waf</string> + <key>CFBundleSignature</key> + <string>????</string> + <key>NOTE</key> + <string>THIS IS A GENERATED FILE, DO NOT MODIFY</string> + <key>CFBundleExecutable</key> + <string>%s</string> +</dict> +</plist> +''' +@feature('c','cxx') +def set_macosx_deployment_target(self): + if self.env['MACOSX_DEPLOYMENT_TARGET']: + os.environ['MACOSX_DEPLOYMENT_TARGET']=self.env['MACOSX_DEPLOYMENT_TARGET'] + elif'MACOSX_DEPLOYMENT_TARGET'not in os.environ: + if Utils.unversioned_sys_platform()=='darwin': + os.environ['MACOSX_DEPLOYMENT_TARGET']='.'.join(platform.mac_ver()[0].split('.')[:2]) +@taskgen_method +def create_bundle_dirs(self,name,out): + bld=self.bld + dir=out.parent.find_or_declare(name) + dir.mkdir() + macos=dir.find_or_declare(['Contents','MacOS']) + macos.mkdir() + return dir +def bundle_name_for_output(out): + name=out.name + k=name.rfind('.') + if k>=0: + name=name[:k]+'.app' + else: + name=name+'.app' + return name +@feature('cprogram','cxxprogram') +@after_method('apply_link') +def create_task_macapp(self): + if self.env['MACAPP']or getattr(self,'mac_app',False): + out=self.link_task.outputs[0] + name=bundle_name_for_output(out) + dir=self.create_bundle_dirs(name,out) + n1=dir.find_or_declare(['Contents','MacOS',out.name]) + self.apptask=self.create_task('macapp',self.link_task.outputs,n1) + inst_to=getattr(self,'install_path','/Applications')+'/%s/Contents/MacOS/'%name + self.bld.install_files(inst_to,n1,chmod=Utils.O755) + if getattr(self,'mac_resources',None): + res_dir=n1.parent.parent.make_node('Resources') + inst_to=getattr(self,'install_path','/Applications')+'/%s/Resources'%name + for x in self.to_list(self.mac_resources): + node=self.path.find_node(x) + if not node: + raise Errors.WafError('Missing mac_resource %r in %r'%(x,self)) + parent=node.parent + if os.path.isdir(node.abspath()): + nodes=node.ant_glob('**') + else: + nodes=[node] + for node in nodes: + rel=node.path_from(parent) + tsk=self.create_task('macapp',node,res_dir.make_node(rel)) + self.bld.install_as(inst_to+'/%s'%rel,node) + if getattr(self.bld,'is_install',None): + self.install_task.hasrun=Task.SKIP_ME +@feature('cprogram','cxxprogram') +@after_method('apply_link') +def create_task_macplist(self): + if self.env['MACAPP']or getattr(self,'mac_app',False): + out=self.link_task.outputs[0] + name=bundle_name_for_output(out) + dir=self.create_bundle_dirs(name,out) + n1=dir.find_or_declare(['Contents','Info.plist']) + self.plisttask=plisttask=self.create_task('macplist',[],n1) + if getattr(self,'mac_plist',False): + node=self.path.find_resource(self.mac_plist) + if node: + plisttask.inputs.append(node) + else: + plisttask.code=self.mac_plist + else: + plisttask.code=app_info%self.link_task.outputs[0].name + inst_to=getattr(self,'install_path','/Applications')+'/%s/Contents/'%name + self.bld.install_files(inst_to,n1) +@feature('cshlib','cxxshlib') +@before_method('apply_link','propagate_uselib_vars') +def apply_bundle(self): + if self.env['MACBUNDLE']or getattr(self,'mac_bundle',False): 
+ self.env['LINKFLAGS_cshlib']=self.env['LINKFLAGS_cxxshlib']=[] + self.env['cshlib_PATTERN']=self.env['cxxshlib_PATTERN']=self.env['macbundle_PATTERN'] + use=self.use=self.to_list(getattr(self,'use',[])) + if not'MACBUNDLE'in use: + use.append('MACBUNDLE') +app_dirs=['Contents','Contents/MacOS','Contents/Resources'] +class macapp(Task.Task): + color='PINK' + def run(self): + self.outputs[0].parent.mkdir() + shutil.copy2(self.inputs[0].srcpath(),self.outputs[0].abspath()) +class macplist(Task.Task): + color='PINK' + ext_in=['.bin'] + def run(self): + if getattr(self,'code',None): + txt=self.code + else: + txt=self.inputs[0].read() + self.outputs[0].write(txt) diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_osx.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_osx.pyc Binary files differnew file mode 100644 index 0000000..67eb056 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_osx.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_preproc.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_preproc.py new file mode 100644 index 0000000..1f99cd3 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_preproc.py @@ -0,0 +1,607 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import re,string,traceback +from waflib import Logs,Utils,Errors +from waflib.Logs import debug,error +class PreprocError(Errors.WafError): + pass +POPFILE='-' +recursion_limit=150 +go_absolute=False +standard_includes=['/usr/include'] +if Utils.is_win32: + standard_includes=[] +use_trigraphs=0 +strict_quotes=0 +g_optrans={'not':'!','and':'&&','bitand':'&','and_eq':'&=','or':'||','bitor':'|','or_eq':'|=','xor':'^','xor_eq':'^=','compl':'~',} +re_lines=re.compile('^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',re.IGNORECASE|re.MULTILINE) +re_mac=re.compile("^[a-zA-Z_]\w*") +re_fun=re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]') +re_pragma_once=re.compile('^\s*once\s*',re.IGNORECASE) +re_nl=re.compile('\\\\\r*\n',re.MULTILINE) +re_cpp=re.compile(r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"',re.DOTALL|re.MULTILINE) +trig_def=[('??'+a,b)for a,b in zip("=-/!'()<>",r'#~\|^[]{}')] +chr_esc={'0':0,'a':7,'b':8,'t':9,'n':10,'f':11,'v':12,'r':13,'\\':92,"'":39} +NUM='i' +OP='O' +IDENT='T' +STR='s' +CHAR='c' +tok_types=[NUM,STR,IDENT,OP] +exp_types=[r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",r'L?"([^"\\]|\\.)*"',r'[a-zA-Z_]\w*',r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',] +re_clexer=re.compile('|'.join(["(?P<%s>%s)"%(name,part)for name,part in zip(tok_types,exp_types)]),re.M) +accepted='a' +ignored='i' +undefined='u' +skipped='s' +def repl(m): + s=m.group(0) + if s.startswith('/'): + return' ' + return s +def filter_comments(filename): + code=Utils.readf(filename) + if use_trigraphs: + for(a,b)in trig_def:code=code.split(a).join(b) + code=re_nl.sub('',code) + code=re_cpp.sub(repl,code) + return[(m.group(2),m.group(3))for m in re.finditer(re_lines,code)] +prec={} +ops=['* / %','+ 
-','<< >>','< <= >= >','== !=','& | ^','&& ||',','] +for x in range(len(ops)): + syms=ops[x] + for u in syms.split(): + prec[u]=x +def trimquotes(s): + if not s:return'' + s=s.rstrip() + if s[0]=="'"and s[-1]=="'":return s[1:-1] + return s +def reduce_nums(val_1,val_2,val_op): + try:a=0+val_1 + except TypeError:a=int(val_1) + try:b=0+val_2 + except TypeError:b=int(val_2) + d=val_op + if d=='%':c=a%b + elif d=='+':c=a+b + elif d=='-':c=a-b + elif d=='*':c=a*b + elif d=='/':c=a/b + elif d=='^':c=a^b + elif d=='|':c=a|b + elif d=='||':c=int(a or b) + elif d=='&':c=a&b + elif d=='&&':c=int(a and b) + elif d=='==':c=int(a==b) + elif d=='!=':c=int(a!=b) + elif d=='<=':c=int(a<=b) + elif d=='<':c=int(a<b) + elif d=='>':c=int(a>b) + elif d=='>=':c=int(a>=b) + elif d=='^':c=int(a^b) + elif d=='<<':c=a<<b + elif d=='>>':c=a>>b + else:c=0 + return c +def get_num(lst): + if not lst:raise PreprocError("empty list for get_num") + (p,v)=lst[0] + if p==OP: + if v=='(': + count_par=1 + i=1 + while i<len(lst): + (p,v)=lst[i] + if p==OP: + if v==')': + count_par-=1 + if count_par==0: + break + elif v=='(': + count_par+=1 + i+=1 + else: + raise PreprocError("rparen expected %r"%lst) + (num,_)=get_term(lst[1:i]) + return(num,lst[i+1:]) + elif v=='+': + return get_num(lst[1:]) + elif v=='-': + num,lst=get_num(lst[1:]) + return(reduce_nums('-1',num,'*'),lst) + elif v=='!': + num,lst=get_num(lst[1:]) + return(int(not int(num)),lst) + elif v=='~': + num,lst=get_num(lst[1:]) + return(~int(num),lst) + else: + raise PreprocError("Invalid op token %r for get_num"%lst) + elif p==NUM: + return v,lst[1:] + elif p==IDENT: + return 0,lst[1:] + else: + raise PreprocError("Invalid token %r for get_num"%lst) +def get_term(lst): + if not lst:raise PreprocError("empty list for get_term") + num,lst=get_num(lst) + if not lst: + return(num,[]) + (p,v)=lst[0] + if p==OP: + if v==',': + return get_term(lst[1:]) + elif v=='?': + count_par=0 + i=1 + while i<len(lst): + (p,v)=lst[i] + if p==OP: + if v==')': + count_par-=1 + elif v=='(': + count_par+=1 + elif v==':': + if count_par==0: + break + i+=1 + else: + raise PreprocError("rparen expected %r"%lst) + if int(num): + return get_term(lst[1:i]) + else: + return get_term(lst[i+1:]) + else: + num2,lst=get_num(lst[1:]) + if not lst: + num2=reduce_nums(num,num2,v) + return get_term([(NUM,num2)]+lst) + p2,v2=lst[0] + if p2!=OP: + raise PreprocError("op expected %r"%lst) + if prec[v2]>=prec[v]: + num2=reduce_nums(num,num2,v) + return get_term([(NUM,num2)]+lst) + else: + num3,lst=get_num(lst[1:]) + num3=reduce_nums(num2,num3,v2) + return get_term([(NUM,num),(p,v),(NUM,num3)]+lst) + raise PreprocError("cannot reduce %r"%lst) +def reduce_eval(lst): + num,lst=get_term(lst) + return(NUM,num) +def stringize(lst): + lst=[str(v2)for(p2,v2)in lst] + return"".join(lst) +def paste_tokens(t1,t2): + p1=None + if t1[0]==OP and t2[0]==OP: + p1=OP + elif t1[0]==IDENT and(t2[0]==IDENT or t2[0]==NUM): + p1=IDENT + elif t1[0]==NUM and t2[0]==NUM: + p1=NUM + if not p1: + raise PreprocError('tokens do not make a valid paste %r and %r'%(t1,t2)) + return(p1,t1[1]+t2[1]) +def reduce_tokens(lst,defs,ban=[]): + i=0 + while i<len(lst): + (p,v)=lst[i] + if p==IDENT and v=="defined": + del lst[i] + if i<len(lst): + (p2,v2)=lst[i] + if p2==IDENT: + if v2 in defs: + lst[i]=(NUM,1) + else: + lst[i]=(NUM,0) + elif p2==OP and v2=='(': + del lst[i] + (p2,v2)=lst[i] + del lst[i] + if v2 in defs: + lst[i]=(NUM,1) + else: + lst[i]=(NUM,0) + else: + raise PreprocError("Invalid define expression %r"%lst) + elif p==IDENT and v in 
defs: + if isinstance(defs[v],str): + a,b=extract_macro(defs[v]) + defs[v]=b + macro_def=defs[v] + to_add=macro_def[1] + if isinstance(macro_def[0],list): + del lst[i] + accu=to_add[:] + reduce_tokens(accu,defs,ban+[v]) + for x in range(len(accu)): + lst.insert(i,accu[x]) + i+=1 + else: + args=[] + del lst[i] + if i>=len(lst): + raise PreprocError("expected '(' after %r (got nothing)"%v) + (p2,v2)=lst[i] + if p2!=OP or v2!='(': + raise PreprocError("expected '(' after %r"%v) + del lst[i] + one_param=[] + count_paren=0 + while i<len(lst): + p2,v2=lst[i] + del lst[i] + if p2==OP and count_paren==0: + if v2=='(': + one_param.append((p2,v2)) + count_paren+=1 + elif v2==')': + if one_param:args.append(one_param) + break + elif v2==',': + if not one_param:raise PreprocError("empty param in funcall %s"%v) + args.append(one_param) + one_param=[] + else: + one_param.append((p2,v2)) + else: + one_param.append((p2,v2)) + if v2=='(':count_paren+=1 + elif v2==')':count_paren-=1 + else: + raise PreprocError('malformed macro') + accu=[] + arg_table=macro_def[0] + j=0 + while j<len(to_add): + (p2,v2)=to_add[j] + if p2==OP and v2=='#': + if j+1<len(to_add)and to_add[j+1][0]==IDENT and to_add[j+1][1]in arg_table: + toks=args[arg_table[to_add[j+1][1]]] + accu.append((STR,stringize(toks))) + j+=1 + else: + accu.append((p2,v2)) + elif p2==OP and v2=='##': + if accu and j+1<len(to_add): + t1=accu[-1] + if to_add[j+1][0]==IDENT and to_add[j+1][1]in arg_table: + toks=args[arg_table[to_add[j+1][1]]] + if toks: + accu[-1]=paste_tokens(t1,toks[0]) + accu.extend(toks[1:]) + else: + accu.append((p2,v2)) + accu.extend(toks) + elif to_add[j+1][0]==IDENT and to_add[j+1][1]=='__VA_ARGS__': + va_toks=[] + st=len(macro_def[0]) + pt=len(args) + for x in args[pt-st+1:]: + va_toks.extend(x) + va_toks.append((OP,',')) + if va_toks:va_toks.pop() + if len(accu)>1: + (p3,v3)=accu[-1] + (p4,v4)=accu[-2] + if v3=='##': + accu.pop() + if v4==','and pt<st: + accu.pop() + accu+=va_toks + else: + accu[-1]=paste_tokens(t1,to_add[j+1]) + j+=1 + else: + accu.append((p2,v2)) + elif p2==IDENT and v2 in arg_table: + toks=args[arg_table[v2]] + reduce_tokens(toks,defs,ban+[v]) + accu.extend(toks) + else: + accu.append((p2,v2)) + j+=1 + reduce_tokens(accu,defs,ban+[v]) + for x in range(len(accu)-1,-1,-1): + lst.insert(i,accu[x]) + i+=1 +def eval_macro(lst,defs): + reduce_tokens(lst,defs,[]) + if not lst:raise PreprocError("missing tokens to evaluate") + (p,v)=reduce_eval(lst) + return int(v)!=0 +def extract_macro(txt): + t=tokenize(txt) + if re_fun.search(txt): + p,name=t[0] + p,v=t[1] + if p!=OP:raise PreprocError("expected open parenthesis") + i=1 + pindex=0 + params={} + prev='(' + while 1: + i+=1 + p,v=t[i] + if prev=='(': + if p==IDENT: + params[v]=pindex + pindex+=1 + prev=p + elif p==OP and v==')': + break + else: + raise PreprocError("unexpected token (3)") + elif prev==IDENT: + if p==OP and v==',': + prev=v + elif p==OP and v==')': + break + else: + raise PreprocError("comma or ... expected") + elif prev==',': + if p==IDENT: + params[v]=pindex + pindex+=1 + prev=p + elif p==OP and v=='...': + raise PreprocError("not implemented (1)") + else: + raise PreprocError("comma or ... 
expected (2)") + elif prev=='...': + raise PreprocError("not implemented (2)") + else: + raise PreprocError("unexpected else") + return(name,[params,t[i+1:]]) + else: + (p,v)=t[0] + if len(t)>1: + return(v,[[],t[1:]]) + else: + return(v,[[],[('T','')]]) +re_include=re.compile('^\s*(<(?P<a>.*)>|"(?P<b>.*)")') +def extract_include(txt,defs): + m=re_include.search(txt) + if m: + if m.group('a'):return'<',m.group('a') + if m.group('b'):return'"',m.group('b') + toks=tokenize(txt) + reduce_tokens(toks,defs,['waf_include']) + if not toks: + raise PreprocError("could not parse include %s"%txt) + if len(toks)==1: + if toks[0][0]==STR: + return'"',toks[0][1] + else: + if toks[0][1]=='<'and toks[-1][1]=='>': + return stringize(toks).lstrip('<').rstrip('>') + raise PreprocError("could not parse include %s."%txt) +def parse_char(txt): + if not txt:raise PreprocError("attempted to parse a null char") + if txt[0]!='\\': + return ord(txt) + c=txt[1] + if c=='x': + if len(txt)==4 and txt[3]in string.hexdigits:return int(txt[2:],16) + return int(txt[2:],16) + elif c.isdigit(): + if c=='0'and len(txt)==2:return 0 + for i in 3,2,1: + if len(txt)>i and txt[1:1+i].isdigit(): + return(1+i,int(txt[1:1+i],8)) + else: + try:return chr_esc[c] + except KeyError:raise PreprocError("could not parse char literal '%s'"%txt) +def tokenize(s): + return tokenize_private(s)[:] +@Utils.run_once +def tokenize_private(s): + ret=[] + for match in re_clexer.finditer(s): + m=match.group + for name in tok_types: + v=m(name) + if v: + if name==IDENT: + try:v=g_optrans[v];name=OP + except KeyError: + if v.lower()=="true": + v=1 + name=NUM + elif v.lower()=="false": + v=0 + name=NUM + elif name==NUM: + if m('oct'):v=int(v,8) + elif m('hex'):v=int(m('hex'),16) + elif m('n0'):v=m('n0') + else: + v=m('char') + if v:v=parse_char(v) + else:v=m('n2')or m('n4') + elif name==OP: + if v=='%:':v='#' + elif v=='%:%:':v='##' + elif name==STR: + v=v[1:-1] + ret.append((name,v)) + break + return ret +@Utils.run_once +def define_name(line): + return re_mac.match(line).group(0) +class c_parser(object): + def __init__(self,nodepaths=None,defines=None): + self.lines=[] + if defines is None: + self.defs={} + else: + self.defs=dict(defines) + self.state=[] + self.count_files=0 + self.currentnode_stack=[] + self.nodepaths=nodepaths or[] + self.nodes=[] + self.names=[] + self.curfile='' + self.ban_includes=set([]) + def cached_find_resource(self,node,filename): + try: + nd=node.ctx.cache_nd + except AttributeError: + nd=node.ctx.cache_nd={} + tup=(node,filename) + try: + return nd[tup] + except KeyError: + ret=node.find_resource(filename) + if ret: + if getattr(ret,'children',None): + ret=None + elif ret.is_child_of(node.ctx.bldnode): + tmp=node.ctx.srcnode.search_node(ret.path_from(node.ctx.bldnode)) + if tmp and getattr(tmp,'children',None): + ret=None + nd[tup]=ret + return ret + def tryfind(self,filename): + self.curfile=filename + found=self.cached_find_resource(self.currentnode_stack[-1],filename) + for n in self.nodepaths: + if found: + break + found=self.cached_find_resource(n,filename) + if found and not found in self.ban_includes: + self.nodes.append(found) + if filename[-4:]!='.moc': + self.addlines(found) + else: + if not filename in self.names: + self.names.append(filename) + return found + def addlines(self,node): + self.currentnode_stack.append(node.parent) + filepath=node.abspath() + self.count_files+=1 + if self.count_files>recursion_limit: + raise PreprocError("recursion limit exceeded") + pc=self.parse_cache + debug('preproc: reading 
file %r',filepath) + try: + lns=pc[filepath] + except KeyError: + pass + else: + self.lines.extend(lns) + return + try: + lines=filter_comments(filepath) + lines.append((POPFILE,'')) + lines.reverse() + pc[filepath]=lines + self.lines.extend(lines) + except IOError: + raise PreprocError("could not read the file %s"%filepath) + except Exception: + if Logs.verbose>0: + error("parsing %s failed"%filepath) + traceback.print_exc() + def start(self,node,env): + debug('preproc: scanning %s (in %s)',node.name,node.parent.name) + bld=node.ctx + try: + self.parse_cache=bld.parse_cache + except AttributeError: + bld.parse_cache={} + self.parse_cache=bld.parse_cache + self.current_file=node + self.addlines(node) + if env['DEFINES']: + try: + lst=['%s %s'%(x[0],trimquotes('='.join(x[1:])))for x in[y.split('=')for y in env['DEFINES']]] + lst.reverse() + self.lines.extend([('define',x)for x in lst]) + except AttributeError: + pass + while self.lines: + (token,line)=self.lines.pop() + if token==POPFILE: + self.count_files-=1 + self.currentnode_stack.pop() + continue + try: + ve=Logs.verbose + if ve:debug('preproc: line is %s - %s state is %s',token,line,self.state) + state=self.state + if token[:2]=='if': + state.append(undefined) + elif token=='endif': + state.pop() + if token[0]!='e': + if skipped in self.state or ignored in self.state: + continue + if token=='if': + ret=eval_macro(tokenize(line),self.defs) + if ret:state[-1]=accepted + else:state[-1]=ignored + elif token=='ifdef': + m=re_mac.match(line) + if m and m.group(0)in self.defs:state[-1]=accepted + else:state[-1]=ignored + elif token=='ifndef': + m=re_mac.match(line) + if m and m.group(0)in self.defs:state[-1]=ignored + else:state[-1]=accepted + elif token=='include'or token=='import': + (kind,inc)=extract_include(line,self.defs) + if ve:debug('preproc: include found %s (%s) ',inc,kind) + if kind=='"'or not strict_quotes: + self.current_file=self.tryfind(inc) + if token=='import': + self.ban_includes.add(self.current_file) + elif token=='elif': + if state[-1]==accepted: + state[-1]=skipped + elif state[-1]==ignored: + if eval_macro(tokenize(line),self.defs): + state[-1]=accepted + elif token=='else': + if state[-1]==accepted:state[-1]=skipped + elif state[-1]==ignored:state[-1]=accepted + elif token=='define': + try: + self.defs[define_name(line)]=line + except Exception: + raise PreprocError("Invalid define line %s"%line) + elif token=='undef': + m=re_mac.match(line) + if m and m.group(0)in self.defs: + self.defs.__delitem__(m.group(0)) + elif token=='pragma': + if re_pragma_once.match(line.lower()): + self.ban_includes.add(self.current_file) + except Exception ,e: + if Logs.verbose: + debug('preproc: line parsing failed (%s): %s %s',e,line,Utils.ex_stack()) +def scan(task): + global go_absolute + try: + incn=task.generator.includes_nodes + except AttributeError: + raise Errors.WafError('%r is missing a feature such as "c", "cxx" or "includes": '%task.generator) + if go_absolute: + nodepaths=incn+[task.generator.bld.root.find_dir(x)for x in standard_includes] + else: + nodepaths=[x for x in incn if x.is_child_of(x.ctx.srcnode)or x.is_child_of(x.ctx.bldnode)] + tmp=c_parser(nodepaths) + tmp.start(task.inputs[0],task.env) + if Logs.verbose: + debug('deps: deps for %r: %r; unresolved %r'%(task.inputs,tmp.nodes,tmp.names)) + return(tmp.nodes,tmp.names) diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_preproc.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_preproc.pyc Binary files differnew file mode 
100644 index 0000000..843688a --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_preproc.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_tests.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_tests.py new file mode 100644 index 0000000..f275977 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_tests.py @@ -0,0 +1,153 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib import Task +from waflib.Configure import conf +from waflib.TaskGen import feature,before_method,after_method +import sys +LIB_CODE=''' +#ifdef _MSC_VER +#define testEXPORT __declspec(dllexport) +#else +#define testEXPORT +#endif +testEXPORT int lib_func(void) { return 9; } +''' +MAIN_CODE=''' +#ifdef _MSC_VER +#define testEXPORT __declspec(dllimport) +#else +#define testEXPORT +#endif +testEXPORT int lib_func(void); +int main(int argc, char **argv) { + (void)argc; (void)argv; + return !(lib_func() == 9); +} +''' +@feature('link_lib_test') +@before_method('process_source') +def link_lib_test_fun(self): + def write_test_file(task): + task.outputs[0].write(task.generator.code) + rpath=[] + if getattr(self,'add_rpath',False): + rpath=[self.bld.path.get_bld().abspath()] + mode=self.mode + m='%s %s'%(mode,mode) + ex=self.test_exec and'test_exec'or'' + bld=self.bld + bld(rule=write_test_file,target='test.'+mode,code=LIB_CODE) + bld(rule=write_test_file,target='main.'+mode,code=MAIN_CODE) + bld(features='%sshlib'%m,source='test.'+mode,target='test') + bld(features='%sprogram %s'%(m,ex),source='main.'+mode,target='app',use='test',rpath=rpath) +@conf +def check_library(self,mode=None,test_exec=True): + if not mode: + mode='c' + if self.env.CXX: + mode='cxx' + self.check(compile_filename=[],features='link_lib_test',msg='Checking for libraries',mode=mode,test_exec=test_exec,) +INLINE_CODE=''' +typedef int foo_t; +static %s foo_t static_foo () {return 0; } +%s foo_t foo () { + return 0; +} +''' +INLINE_VALUES=['inline','__inline__','__inline'] +@conf +def check_inline(self,**kw): + self.start_msg('Checking for inline') + if not'define_name'in kw: + kw['define_name']='INLINE_MACRO' + if not'features'in kw: + if self.env.CXX: + kw['features']=['cxx'] + else: + kw['features']=['c'] + for x in INLINE_VALUES: + kw['fragment']=INLINE_CODE%(x,x) + try: + self.check(**kw) + except self.errors.ConfigurationError: + continue + else: + self.end_msg(x) + if x!='inline': + self.define('inline',x,quote=False) + return x + self.fatal('could not use inline functions') +LARGE_FRAGMENT='''#include <unistd.h> +int main(int argc, char **argv) { + (void)argc; (void)argv; + return !(sizeof(off_t) >= 8); +} +''' +@conf +def check_large_file(self,**kw): + if not'define_name'in kw: + kw['define_name']='HAVE_LARGEFILE' + if not'execute'in kw: + kw['execute']=True + if not'features'in kw: + if self.env.CXX: + kw['features']=['cxx','cxxprogram'] + else: + kw['features']=['c','cprogram'] + kw['fragment']=LARGE_FRAGMENT + kw['msg']='Checking for large file support' + ret=True + try: + if self.env.DEST_BINFMT!='pe': + ret=self.check(**kw) + except self.errors.ConfigurationError: + pass + else: + if ret: + return True + kw['msg']='Checking for -D_FILE_OFFSET_BITS=64' + kw['defines']=['_FILE_OFFSET_BITS=64'] + try: + ret=self.check(**kw) + except self.errors.ConfigurationError: + pass + else: + self.define('_FILE_OFFSET_BITS',64) + return ret + 
self.fatal('There is no support for large files') +ENDIAN_FRAGMENT=''' +short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 }; +short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 }; +int use_ascii (int i) { + return ascii_mm[i] + ascii_ii[i]; +} +short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 }; +short int ebcdic_mm[] = { 0xC2C9, 0xC785, 0x95C4, 0x8981, 0x95E2, 0xA8E2, 0 }; +int use_ebcdic (int i) { + return ebcdic_mm[i] + ebcdic_ii[i]; +} +extern int foo; +''' +class grep_for_endianness(Task.Task): + color='PINK' + def run(self): + txt=self.inputs[0].read(flags='rb').decode('iso8859-1') + if txt.find('LiTTleEnDian')>-1: + self.generator.tmp.append('little') + elif txt.find('BIGenDianSyS')>-1: + self.generator.tmp.append('big') + else: + return-1 +@feature('grep_for_endianness') +@after_method('process_source') +def grep_for_endianness_fun(self): + self.create_task('grep_for_endianness',self.compiled_tasks[0].outputs[0]) +@conf +def check_endianness(self): + tmp=[] + def check_msg(self): + return tmp[0] + self.check(fragment=ENDIAN_FRAGMENT,features='c grep_for_endianness',msg="Checking for endianness",define='ENDIANNESS',tmp=tmp,okmsg=check_msg) + return tmp[0] diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_tests.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_tests.pyc Binary files differnew file mode 100644 index 0000000..22ef2a0 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_tests.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ccroot.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ccroot.py new file mode 100644 index 0000000..2fde5c7 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ccroot.py @@ -0,0 +1,405 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
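Usage note (illustrative only): c_tests.py above registers the configuration helpers check_library, check_inline, check_large_file and check_endianness. A minimal configure() sketch showing how they are typically called once a C compiler is loaded; project layout is a placeholder.

def options(opt):
    opt.load('compiler_c')

def configure(conf):
    conf.load('compiler_c')
    conf.check_library()                  # builds and runs the shlib/program pair defined above
    conf.check_inline()                   # defines INLINE_MACRO to inline/__inline__/__inline
    conf.check_large_file()               # may define _FILE_OFFSET_BITS=64 and HAVE_LARGEFILE
    endianness = conf.check_endianness()  # returns 'little' or 'big' and defines ENDIANNESS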
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,re +from waflib import Task,Utils,Node,Errors +from waflib.TaskGen import after_method,before_method,feature,taskgen_method,extension +from waflib.Tools import c_aliases,c_preproc,c_config,c_osx,c_tests +from waflib.Configure import conf +SYSTEM_LIB_PATHS=['/usr/lib64','/usr/lib','/usr/local/lib64','/usr/local/lib'] +USELIB_VARS=Utils.defaultdict(set) +USELIB_VARS['c']=set(['INCLUDES','FRAMEWORKPATH','DEFINES','CPPFLAGS','CCDEPS','CFLAGS','ARCH']) +USELIB_VARS['cxx']=set(['INCLUDES','FRAMEWORKPATH','DEFINES','CPPFLAGS','CXXDEPS','CXXFLAGS','ARCH']) +USELIB_VARS['d']=set(['INCLUDES','DFLAGS']) +USELIB_VARS['includes']=set(['INCLUDES','FRAMEWORKPATH','ARCH']) +USELIB_VARS['cprogram']=USELIB_VARS['cxxprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','FRAMEWORK','FRAMEWORKPATH','ARCH']) +USELIB_VARS['cshlib']=USELIB_VARS['cxxshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','FRAMEWORK','FRAMEWORKPATH','ARCH']) +USELIB_VARS['cstlib']=USELIB_VARS['cxxstlib']=set(['ARFLAGS','LINKDEPS']) +USELIB_VARS['dprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) +USELIB_VARS['dshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) +USELIB_VARS['dstlib']=set(['ARFLAGS','LINKDEPS']) +USELIB_VARS['asm']=set(['ASFLAGS']) +@taskgen_method +def create_compiled_task(self,name,node): + out='%s.%d.o'%(node.name,self.idx) + task=self.create_task(name,node,node.parent.find_or_declare(out)) + try: + self.compiled_tasks.append(task) + except AttributeError: + self.compiled_tasks=[task] + return task +@taskgen_method +def to_incnodes(self,inlst): + lst=[] + seen=set([]) + for x in self.to_list(inlst): + if x in seen or not x: + continue + seen.add(x) + if isinstance(x,Node.Node): + lst.append(x) + else: + if os.path.isabs(x): + lst.append(self.bld.root.make_node(x)or x) + else: + if x[0]=='#': + p=self.bld.bldnode.make_node(x[1:]) + v=self.bld.srcnode.make_node(x[1:]) + else: + p=self.path.get_bld().make_node(x) + v=self.path.make_node(x) + if p.is_child_of(self.bld.bldnode): + p.mkdir() + lst.append(p) + lst.append(v) + return lst +@feature('c','cxx','d','asm','fc','includes') +@after_method('propagate_uselib_vars','process_source') +def apply_incpaths(self): + lst=self.to_incnodes(self.to_list(getattr(self,'includes',[]))+self.env['INCLUDES']) + self.includes_nodes=lst + self.env['INCPATHS']=[x.abspath()for x in lst] +class link_task(Task.Task): + color='YELLOW' + inst_to=None + chmod=Utils.O755 + def add_target(self,target): + if isinstance(target,str): + pattern=self.env[self.__class__.__name__+'_PATTERN'] + if not pattern: + pattern='%s' + folder,name=os.path.split(target) + if self.__class__.__name__.find('shlib')>0 and getattr(self.generator,'vnum',None): + nums=self.generator.vnum.split('.') + if self.env.DEST_BINFMT=='pe': + name=name+'-'+nums[0] + elif self.env.DEST_OS=='openbsd': + pattern='%s.%s.%s'%(pattern,nums[0],nums[1]) + tmp=folder+os.sep+pattern%name + target=self.generator.path.find_or_declare(tmp) + self.set_outputs(target) +class stlink_task(link_task): + run_str='${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}' +def rm_tgt(cls): + old=cls.run + def wrap(self): + try:os.remove(self.outputs[0].abspath()) + except OSError:pass + return old(self) + setattr(cls,'run',wrap) +rm_tgt(stlink_task) +@feature('c','cxx','d','fc','asm') +@after_method('process_source') +def apply_link(self): + 
for x in self.features: + if x=='cprogram'and'cxx'in self.features: + x='cxxprogram' + elif x=='cshlib'and'cxx'in self.features: + x='cxxshlib' + if x in Task.classes: + if issubclass(Task.classes[x],link_task): + link=x + break + else: + return + objs=[t.outputs[0]for t in getattr(self,'compiled_tasks',[])] + self.link_task=self.create_task(link,objs) + self.link_task.add_target(self.target) + try: + inst_to=self.install_path + except AttributeError: + inst_to=self.link_task.__class__.inst_to + if inst_to: + self.install_task=self.bld.install_files(inst_to,self.link_task.outputs[:],env=self.env,chmod=self.link_task.chmod) +@taskgen_method +def use_rec(self,name,**kw): + if name in self.tmp_use_not or name in self.tmp_use_seen: + return + try: + y=self.bld.get_tgen_by_name(name) + except Errors.WafError: + self.uselib.append(name) + self.tmp_use_not.add(name) + return + self.tmp_use_seen.append(name) + y.post() + y.tmp_use_objects=objects=kw.get('objects',True) + y.tmp_use_stlib=stlib=kw.get('stlib',True) + try: + link_task=y.link_task + except AttributeError: + y.tmp_use_var='' + else: + objects=False + if not isinstance(link_task,stlink_task): + stlib=False + y.tmp_use_var='LIB' + else: + y.tmp_use_var='STLIB' + p=self.tmp_use_prec + for x in self.to_list(getattr(y,'use',[])): + try: + p[x].append(name) + except KeyError: + p[x]=[name] + self.use_rec(x,objects=objects,stlib=stlib) +@feature('c','cxx','d','use','fc') +@before_method('apply_incpaths','propagate_uselib_vars') +@after_method('apply_link','process_source') +def process_use(self): + use_not=self.tmp_use_not=set([]) + self.tmp_use_seen=[] + use_prec=self.tmp_use_prec={} + self.uselib=self.to_list(getattr(self,'uselib',[])) + self.includes=self.to_list(getattr(self,'includes',[])) + names=self.to_list(getattr(self,'use',[])) + for x in names: + self.use_rec(x) + for x in use_not: + if x in use_prec: + del use_prec[x] + out=[] + tmp=[] + for x in self.tmp_use_seen: + for k in use_prec.values(): + if x in k: + break + else: + tmp.append(x) + while tmp: + e=tmp.pop() + out.append(e) + try: + nlst=use_prec[e] + except KeyError: + pass + else: + del use_prec[e] + for x in nlst: + for y in use_prec: + if x in use_prec[y]: + break + else: + tmp.append(x) + if use_prec: + raise Errors.WafError('Cycle detected in the use processing %r'%use_prec) + out.reverse() + link_task=getattr(self,'link_task',None) + for x in out: + y=self.bld.get_tgen_by_name(x) + var=y.tmp_use_var + if var and link_task: + if var=='LIB'or y.tmp_use_stlib: + self.env.append_value(var,[y.target[y.target.rfind(os.sep)+1:]]) + self.link_task.dep_nodes.extend(y.link_task.outputs) + tmp_path=y.link_task.outputs[0].parent.path_from(self.bld.bldnode) + self.env.append_value(var+'PATH',[tmp_path]) + else: + if y.tmp_use_objects: + self.add_objects_from_tgen(y) + if getattr(y,'export_includes',None): + self.includes.extend(y.to_incnodes(y.export_includes)) + if getattr(y,'export_defines',None): + self.env.append_value('DEFINES',self.to_list(y.export_defines)) + for x in names: + try: + y=self.bld.get_tgen_by_name(x) + except Errors.WafError: + if not self.env['STLIB_'+x]and not x in self.uselib: + self.uselib.append(x) + else: + for k in self.to_list(getattr(y,'use',[])): + if not self.env['STLIB_'+k]and not k in self.uselib: + self.uselib.append(k) +@taskgen_method +def accept_node_to_link(self,node): + return not node.name.endswith('.pdb') +@taskgen_method +def add_objects_from_tgen(self,tg): + try: + link_task=self.link_task + except AttributeError: + pass + else: + for 
tsk in getattr(tg,'compiled_tasks',[]): + for x in tsk.outputs: + if self.accept_node_to_link(x): + link_task.inputs.append(x) +@taskgen_method +def get_uselib_vars(self): + _vars=set([]) + for x in self.features: + if x in USELIB_VARS: + _vars|=USELIB_VARS[x] + return _vars +@feature('c','cxx','d','fc','javac','cs','uselib','asm') +@after_method('process_use') +def propagate_uselib_vars(self): + _vars=self.get_uselib_vars() + env=self.env + for x in _vars: + y=x.lower() + env.append_unique(x,self.to_list(getattr(self,y,[]))) + for x in self.features: + for var in _vars: + compvar='%s_%s'%(var,x) + env.append_value(var,env[compvar]) + for x in self.to_list(getattr(self,'uselib',[])): + for v in _vars: + env.append_value(v,env[v+'_'+x]) +@feature('cshlib','cxxshlib','fcshlib') +@after_method('apply_link') +def apply_implib(self): + if not self.env.DEST_BINFMT=='pe': + return + dll=self.link_task.outputs[0] + if isinstance(self.target,Node.Node): + name=self.target.name + else: + name=os.path.split(self.target)[1] + implib=self.env['implib_PATTERN']%name + implib=dll.parent.find_or_declare(implib) + self.env.append_value('LINKFLAGS',self.env['IMPLIB_ST']%implib.bldpath()) + self.link_task.outputs.append(implib) + if getattr(self,'defs',None)and self.env.DEST_BINFMT=='pe': + node=self.path.find_resource(self.defs) + if not node: + raise Errors.WafError('invalid def file %r'%self.defs) + if'msvc'in(self.env.CC_NAME,self.env.CXX_NAME): + self.env.append_value('LINKFLAGS','/def:%s'%node.path_from(self.bld.bldnode)) + self.link_task.dep_nodes.append(node) + else: + self.link_task.inputs.append(node) + try: + inst_to=self.install_path + except AttributeError: + inst_to=self.link_task.__class__.inst_to + if not inst_to: + return + self.implib_install_task=self.bld.install_as('${LIBDIR}/%s'%implib.name,implib,self.env) +re_vnum=re.compile('^([1-9]\\d*|0)[.]([1-9]\\d*|0)[.]([1-9]\\d*|0)$') +@feature('cshlib','cxxshlib','dshlib','fcshlib','vnum') +@after_method('apply_link','propagate_uselib_vars') +def apply_vnum(self): + if not getattr(self,'vnum','')or os.name!='posix'or self.env.DEST_BINFMT not in('elf','mac-o'): + return + link=self.link_task + if not re_vnum.match(self.vnum): + raise Errors.WafError('Invalid version %r for %r'%(self.vnum,self)) + nums=self.vnum.split('.') + node=link.outputs[0] + libname=node.name + if libname.endswith('.dylib'): + name3=libname.replace('.dylib','.%s.dylib'%self.vnum) + name2=libname.replace('.dylib','.%s.dylib'%nums[0]) + else: + name3=libname+'.'+self.vnum + name2=libname+'.'+nums[0] + if self.env.SONAME_ST: + v=self.env.SONAME_ST%name2 + self.env.append_value('LINKFLAGS',v.split()) + if self.env.DEST_OS!='openbsd': + self.create_task('vnum',node,[node.parent.find_or_declare(name2),node.parent.find_or_declare(name3)]) + if getattr(self,'install_task',None): + self.install_task.hasrun=Task.SKIP_ME + bld=self.bld + path=self.install_task.dest + if self.env.DEST_OS=='openbsd': + libname=self.link_task.outputs[0].name + t1=bld.install_as('%s%s%s'%(path,os.sep,libname),node,env=self.env,chmod=self.link_task.chmod) + self.vnum_install_task=(t1,) + else: + t1=bld.install_as(path+os.sep+name3,node,env=self.env,chmod=self.link_task.chmod) + t2=bld.symlink_as(path+os.sep+name2,name3) + t3=bld.symlink_as(path+os.sep+libname,name3) + self.vnum_install_task=(t1,t2,t3) + if'-dynamiclib'in self.env['LINKFLAGS']: + try: + inst_to=self.install_path + except AttributeError: + inst_to=self.link_task.__class__.inst_to + if inst_to: + p=Utils.subst_vars(inst_to,self.env) + 
path=os.path.join(p,self.link_task.outputs[0].name) + self.env.append_value('LINKFLAGS',['-install_name',path]) +class vnum(Task.Task): + color='CYAN' + quient=True + ext_in=['.bin'] + def run(self): + for x in self.outputs: + path=x.abspath() + try: + os.remove(path) + except OSError: + pass + try: + os.symlink(self.inputs[0].name,path) + except OSError: + return 1 +class fake_shlib(link_task): + def runnable_status(self): + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + for x in self.outputs: + x.sig=Utils.h_file(x.abspath()) + return Task.SKIP_ME +class fake_stlib(stlink_task): + def runnable_status(self): + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + for x in self.outputs: + x.sig=Utils.h_file(x.abspath()) + return Task.SKIP_ME +@conf +def read_shlib(self,name,paths=[],export_includes=[],export_defines=[]): + return self(name=name,features='fake_lib',lib_paths=paths,lib_type='shlib',export_includes=export_includes,export_defines=export_defines) +@conf +def read_stlib(self,name,paths=[],export_includes=[],export_defines=[]): + return self(name=name,features='fake_lib',lib_paths=paths,lib_type='stlib',export_includes=export_includes,export_defines=export_defines) +lib_patterns={'shlib':['lib%s.so','%s.so','lib%s.dylib','lib%s.dll','%s.dll'],'stlib':['lib%s.a','%s.a','lib%s.dll','%s.dll','lib%s.lib','%s.lib'],} +@feature('fake_lib') +def process_lib(self): + node=None + names=[x%self.name for x in lib_patterns[self.lib_type]] + for x in self.lib_paths+[self.path]+SYSTEM_LIB_PATHS: + if not isinstance(x,Node.Node): + x=self.bld.root.find_node(x)or self.path.find_node(x) + if not x: + continue + for y in names: + node=x.find_node(y) + if node: + node.sig=Utils.h_file(node.abspath()) + break + else: + continue + break + else: + raise Errors.WafError('could not find library %r'%self.name) + self.link_task=self.create_task('fake_%s'%self.lib_type,[],[node]) + self.target=self.name +class fake_o(Task.Task): + def runnable_status(self): + return Task.SKIP_ME +@extension('.o','.obj') +def add_those_o_files(self,node): + tsk=self.create_task('fake_o',[],node) + try: + self.compiled_tasks.append(tsk) + except AttributeError: + self.compiled_tasks=[tsk] +@feature('fake_obj') +@before_method('process_source') +def process_objs(self): + for node in self.to_nodes(self.source): + self.add_those_o_files(node) + self.source=[] +@conf +def read_object(self,obj): + if not isinstance(obj,self.path.__class__): + obj=self.path.find_resource(obj) + return self(features='fake_obj',source=obj,name=obj.name) diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ccroot.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ccroot.pyc Binary files differnew file mode 100644 index 0000000..5aa74ff --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ccroot.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_c.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_c.py new file mode 100644 index 0000000..04504fa --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_c.py @@ -0,0 +1,39 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
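Usage note (illustrative only): ccroot.py above wires up the use= and vnum= attributes and the read_shlib/read_stlib helpers for pre-built libraries. A hedged wscript sketch; targets, sources and library paths are placeholders.

def build(bld):
    # apply_vnum() creates/installs libfoo.so.1 and libfoo.so.1.2.3 links on ELF platforms
    bld.shlib(source='foo.c', target='foo', vnum='1.2.3')

    # declare an external shared library without building it (features='fake_lib')
    bld.read_shlib('m', paths=['/usr/lib', '/usr/lib64'])

    # process_use() links against both and propagates export_includes/export_defines
    bld.program(source='main.c', target='app', use='foo m', includes='.')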
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys,imp,types +from waflib.Tools import ccroot +from waflib import Utils,Configure +from waflib.Logs import debug +c_compiler={'win32':['msvc','gcc'],'cygwin':['gcc'],'darwin':['gcc'],'aix':['xlc','gcc'],'linux':['gcc','icc'],'sunos':['suncc','gcc'],'irix':['gcc','irixcc'],'hpux':['gcc'],'gnu':['gcc'],'java':['gcc','msvc','icc'],'default':['gcc'],} +def configure(conf): + try:test_for_compiler=conf.options.check_c_compiler + except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_c')") + for compiler in test_for_compiler.split(): + conf.env.stash() + conf.start_msg('Checking for %r (c compiler)'%compiler) + try: + conf.load(compiler) + except conf.errors.ConfigurationError ,e: + conf.env.revert() + conf.end_msg(False) + debug('compiler_c: %r'%e) + else: + if conf.env['CC']: + conf.end_msg(conf.env.get_flat('CC')) + conf.env['COMPILER_CC']=compiler + break + conf.end_msg(False) + else: + conf.fatal('could not configure a c compiler!') +def options(opt): + opt.load_special_tools('c_*.py',ban=['c_dumbpreproc.py']) + global c_compiler + build_platform=Utils.unversioned_sys_platform() + possible_compiler_list=c_compiler[build_platform in c_compiler and build_platform or'default'] + test_for_compiler=' '.join(possible_compiler_list) + cc_compiler_opts=opt.add_option_group("C Compiler Options") + cc_compiler_opts.add_option('--check-c-compiler',default="%s"%test_for_compiler,help='On this platform (%s) the following C-Compiler will be checked by default: "%s"'%(build_platform,test_for_compiler),dest="check_c_compiler") + for x in test_for_compiler.split(): + opt.load('%s'%x) diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_c.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_c.pyc Binary files differnew file mode 100644 index 0000000..5a8bc47 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_c.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_cxx.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_cxx.py new file mode 100644 index 0000000..14b7c7d --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_cxx.py @@ -0,0 +1,39 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
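Usage note (illustrative only): compiler_c.py above selects a C compiler from a per-platform list and registers the --check-c-compiler option. A minimal sketch of the matching wscript hooks and command line.

def options(opt):
    opt.load('compiler_c')       # adds --check-c-compiler

def configure(conf):
    conf.load('compiler_c')      # e.g. tries gcc then icc on linux

# forcing a specific compiler at configure time:
#   ./waf configure --check-c-compiler=gcc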
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys,imp,types +from waflib.Tools import ccroot +from waflib import Utils,Configure +from waflib.Logs import debug +cxx_compiler={'win32':['msvc','g++'],'cygwin':['g++'],'darwin':['g++'],'aix':['xlc++','g++'],'linux':['g++','icpc'],'sunos':['sunc++','g++'],'irix':['g++'],'hpux':['g++'],'gnu':['g++'],'java':['g++','msvc','icpc'],'default':['g++']} +def configure(conf): + try:test_for_compiler=conf.options.check_cxx_compiler + except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_cxx')") + for compiler in test_for_compiler.split(): + conf.env.stash() + conf.start_msg('Checking for %r (c++ compiler)'%compiler) + try: + conf.load(compiler) + except conf.errors.ConfigurationError ,e: + conf.env.revert() + conf.end_msg(False) + debug('compiler_cxx: %r'%e) + else: + if conf.env['CXX']: + conf.end_msg(conf.env.get_flat('CXX')) + conf.env['COMPILER_CXX']=compiler + break + conf.end_msg(False) + else: + conf.fatal('could not configure a c++ compiler!') +def options(opt): + opt.load_special_tools('cxx_*.py') + global cxx_compiler + build_platform=Utils.unversioned_sys_platform() + possible_compiler_list=cxx_compiler[build_platform in cxx_compiler and build_platform or'default'] + test_for_compiler=' '.join(possible_compiler_list) + cxx_compiler_opts=opt.add_option_group('C++ Compiler Options') + cxx_compiler_opts.add_option('--check-cxx-compiler',default="%s"%test_for_compiler,help='On this platform (%s) the following C++ Compiler will be checked by default: "%s"'%(build_platform,test_for_compiler),dest="check_cxx_compiler") + for x in test_for_compiler.split(): + opt.load('%s'%x) diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_cxx.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_cxx.pyc Binary files differnew file mode 100644 index 0000000..cf375d4 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_cxx.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_d.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_d.py new file mode 100644 index 0000000..ee173e1 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_d.py @@ -0,0 +1,29 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys,imp,types +from waflib import Utils,Configure,Options,Logs +def configure(conf): + for compiler in conf.options.dcheck.split(','): + conf.env.stash() + conf.start_msg('Checking for %r (d compiler)'%compiler) + try: + conf.load(compiler) + except conf.errors.ConfigurationError ,e: + conf.env.revert() + conf.end_msg(False) + Logs.debug('compiler_d: %r'%e) + else: + if conf.env.D: + conf.end_msg(conf.env.get_flat('D')) + conf.env['COMPILER_D']=compiler + break + conf.end_msg(False) + else: + conf.fatal('no suitable d compiler was found') +def options(opt): + d_compiler_opts=opt.add_option_group('D Compiler Options') + d_compiler_opts.add_option('--check-d-compiler',default='gdc,dmd,ldc2',action='store',help='check for the compiler [Default:gdc,dmd,ldc2]',dest='dcheck') + for d_compiler in['gdc','dmd','ldc2']: + opt.load('%s'%d_compiler) diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_fc.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_fc.py new file mode 100644 index 0000000..ec5d2ea --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_fc.py @@ -0,0 +1,43 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys,imp,types +from waflib import Utils,Configure,Options,Logs,Errors +from waflib.Tools import fc +fc_compiler={'win32':['gfortran','ifort'],'darwin':['gfortran','g95','ifort'],'linux':['gfortran','g95','ifort'],'java':['gfortran','g95','ifort'],'default':['gfortran'],'aix':['gfortran']} +def __list_possible_compiler(platform): + try: + return fc_compiler[platform] + except KeyError: + return fc_compiler["default"] +def configure(conf): + try:test_for_compiler=conf.options.check_fc + except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_fc')") + for compiler in test_for_compiler.split(): + conf.env.stash() + conf.start_msg('Checking for %r (fortran compiler)'%compiler) + try: + conf.load(compiler) + except conf.errors.ConfigurationError ,e: + conf.env.revert() + conf.end_msg(False) + Logs.debug('compiler_fortran: %r'%e) + else: + if conf.env['FC']: + conf.end_msg(conf.env.get_flat('FC')) + conf.env.COMPILER_FORTRAN=compiler + break + conf.end_msg(False) + else: + conf.fatal('could not configure a fortran compiler!') +def options(opt): + opt.load_special_tools('fc_*.py') + build_platform=Utils.unversioned_sys_platform() + detected_platform=Options.platform + possible_compiler_list=__list_possible_compiler(detected_platform) + test_for_compiler=' '.join(possible_compiler_list) + fortran_compiler_opts=opt.add_option_group("Fortran Compiler Options") + fortran_compiler_opts.add_option('--check-fortran-compiler',default="%s"%test_for_compiler,help='On this platform (%s) the following Fortran Compiler will be checked by default: "%s"'%(detected_platform,test_for_compiler),dest="check_fc") + for compiler in test_for_compiler.split(): + opt.load('%s'%compiler) diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/cs.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/cs.py new file mode 100644 index 0000000..a78e138 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/cs.py @@ -0,0 +1,132 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib import Utils,Task,Options,Logs,Errors +from waflib.TaskGen import before_method,after_method,feature +from waflib.Tools import ccroot +from waflib.Configure import conf +import os,tempfile +ccroot.USELIB_VARS['cs']=set(['CSFLAGS','ASSEMBLIES','RESOURCES']) +ccroot.lib_patterns['csshlib']=['%s'] +@feature('cs') +@before_method('process_source') +def apply_cs(self): + cs_nodes=[] + no_nodes=[] + for x in self.to_nodes(self.source): + if x.name.endswith('.cs'): + cs_nodes.append(x) + else: + no_nodes.append(x) + self.source=no_nodes + bintype=getattr(self,'bintype',self.gen.endswith('.dll')and'library'or'exe') + self.cs_task=tsk=self.create_task('mcs',cs_nodes,self.path.find_or_declare(self.gen)) + tsk.env.CSTYPE='/target:%s'%bintype + tsk.env.OUT='/out:%s'%tsk.outputs[0].abspath() + self.env.append_value('CSFLAGS','/platform:%s'%getattr(self,'platform','anycpu')) + inst_to=getattr(self,'install_path',bintype=='exe'and'${BINDIR}'or'${LIBDIR}') + if inst_to: + mod=getattr(self,'chmod',bintype=='exe'and Utils.O755 or Utils.O644) + self.install_task=self.bld.install_files(inst_to,self.cs_task.outputs[:],env=self.env,chmod=mod) +@feature('cs') +@after_method('apply_cs') +def use_cs(self): + names=self.to_list(getattr(self,'use',[])) + get=self.bld.get_tgen_by_name + for x in names: + try: + y=get(x) + except Errors.WafError: + self.env.append_value('CSFLAGS','/reference:%s'%x) + continue + y.post() + tsk=getattr(y,'cs_task',None)or getattr(y,'link_task',None) + if not tsk: + self.bld.fatal('cs task has no link task for use %r'%self) + self.cs_task.dep_nodes.extend(tsk.outputs) + self.cs_task.set_run_after(tsk) + self.env.append_value('CSFLAGS','/reference:%s'%tsk.outputs[0].abspath()) +@feature('cs') +@after_method('apply_cs','use_cs') +def debug_cs(self): + csdebug=getattr(self,'csdebug',self.env.CSDEBUG) + if not csdebug: + return + node=self.cs_task.outputs[0] + if self.env.CS_NAME=='mono': + out=node.parent.find_or_declare(node.name+'.mdb') + else: + out=node.change_ext('.pdb') + self.cs_task.outputs.append(out) + try: + self.install_task.source.append(out) + except AttributeError: + pass + if csdebug=='pdbonly': + val=['/debug+','/debug:pdbonly'] + elif csdebug=='full': + val=['/debug+','/debug:full'] + else: + val=['/debug-'] + self.env.append_value('CSFLAGS',val) +class mcs(Task.Task): + color='YELLOW' + run_str='${MCS} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}' + def exec_command(self,cmd,**kw): + bld=self.generator.bld + try: + if not kw.get('cwd',None): + kw['cwd']=bld.cwd + except AttributeError: + bld.cwd=kw['cwd']=bld.variant_dir + try: + tmp=None + if isinstance(cmd,list)and len(' '.join(cmd))>=8192: + program=cmd[0] + cmd=[self.quote_response_command(x)for x in cmd] + (fd,tmp)=tempfile.mkstemp() + os.write(fd,'\r\n'.join(i.replace('\\','\\\\')for i in cmd[1:])) + os.close(fd) + cmd=[program,'@'+tmp] + ret=self.generator.bld.exec_command(cmd,**kw) + finally: + if tmp: + try: + os.remove(tmp) + except OSError: + pass + return ret + def quote_response_command(self,flag): + if flag.lower()=='/noconfig': + return'' + if flag.find(' ')>-1: + for x in('/r:','/reference:','/resource:','/lib:','/out:'): + if flag.startswith(x): + flag='%s"%s"'%(x,'","'.join(flag[len(x):].split(','))) + break + else: + flag='"%s"'%flag + return flag +def configure(conf): + csc=getattr(Options.options,'cscbinary',None) + if csc: + conf.env.MCS=csc + 
conf.find_program(['csc','mcs','gmcs'],var='MCS') + conf.env.ASS_ST='/r:%s' + conf.env.RES_ST='/resource:%s' + conf.env.CS_NAME='csc' + if str(conf.env.MCS).lower().find('mcs')>-1: + conf.env.CS_NAME='mono' +def options(opt): + opt.add_option('--with-csc-binary',type='string',dest='cscbinary') +class fake_csshlib(Task.Task): + color='YELLOW' + inst_to=None + def runnable_status(self): + for x in self.outputs: + x.sig=Utils.h_file(x.abspath()) + return Task.SKIP_ME +@conf +def read_csshlib(self,name,paths=[]): + return self(name=name,features='fake_lib',lib_paths=paths,lib_type='csshlib') diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/cxx.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/cxx.py new file mode 100644 index 0000000..b744a8d --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/cxx.py @@ -0,0 +1,26 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib import TaskGen,Task,Utils +from waflib.Tools import c_preproc +from waflib.Tools.ccroot import link_task,stlink_task +@TaskGen.extension('.cpp','.cc','.cxx','.C','.c++') +def cxx_hook(self,node): + return self.create_compiled_task('cxx',node) +if not'.c'in TaskGen.task_gen.mappings: + TaskGen.task_gen.mappings['.c']=TaskGen.task_gen.mappings['.cpp'] +class cxx(Task.Task): + run_str='${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT}' + vars=['CXXDEPS'] + ext_in=['.h'] + scan=c_preproc.scan +class cxxprogram(link_task): + run_str='${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB}' + vars=['LINKDEPS'] + ext_out=['.bin'] + inst_to='${BINDIR}' +class cxxshlib(cxxprogram): + inst_to='${LIBDIR}' +class cxxstlib(stlink_task): + pass diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/cxx.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/cxx.pyc Binary files differnew file mode 100644 index 0000000..11e2e1f --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/cxx.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/d.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/d.py new file mode 100644 index 0000000..1838740 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/d.py @@ -0,0 +1,54 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
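Usage note (illustrative only): cs.py above adds a 'cs' feature whose output name comes from the gen attribute (a .dll gives /target:library, anything else an exe) and whose csdebug attribute controls the /debug flags. Source and assembly names below are placeholders.

def options(opt):
    opt.load('cs')               # adds --with-csc-binary

def configure(conf):
    conf.load('cs')              # locates csc, mcs or gmcs

def build(bld):
    bld(features='cs', source='hello.cs', gen='hello.exe', csdebug='full')
    # names in use= that are not task generators become plain /reference: flags
    bld(features='cs', source='lib.cs', gen='mylib.dll', use='System.Core')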
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib import Utils,Task,Errors +from waflib.TaskGen import taskgen_method,feature,extension +from waflib.Tools import d_scan,d_config +from waflib.Tools.ccroot import link_task,stlink_task +class d(Task.Task): + color='GREEN' + run_str='${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_SRC_F:SRC} ${D_TGT_F:TGT}' + scan=d_scan.scan +class d_with_header(d): + run_str='${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_HDR_F:tgt.outputs[1].bldpath()} ${D_SRC_F:SRC} ${D_TGT_F:tgt.outputs[0].bldpath()}' +class d_header(Task.Task): + color='BLUE' + run_str='${D} ${D_HEADER} ${SRC}' +class dprogram(link_task): + run_str='${D_LINKER} ${LINKFLAGS} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F:TGT} ${RPATH_ST:RPATH} ${DSTLIB_MARKER} ${DSTLIBPATH_ST:STLIBPATH} ${DSTLIB_ST:STLIB} ${DSHLIB_MARKER} ${DLIBPATH_ST:LIBPATH} ${DSHLIB_ST:LIB}' + inst_to='${BINDIR}' +class dshlib(dprogram): + inst_to='${LIBDIR}' +class dstlib(stlink_task): + pass +@extension('.d','.di','.D') +def d_hook(self,node): + ext=Utils.destos_to_binfmt(self.env.DEST_OS)=='pe'and'obj'or'o' + out='%s.%d.%s'%(node.name,self.idx,ext) + def create_compiled_task(self,name,node): + task=self.create_task(name,node,node.parent.find_or_declare(out)) + try: + self.compiled_tasks.append(task) + except AttributeError: + self.compiled_tasks=[task] + return task + if getattr(self,'generate_headers',None): + tsk=create_compiled_task(self,'d_with_header',node) + tsk.outputs.append(node.change_ext(self.env['DHEADER_ext'])) + else: + tsk=create_compiled_task(self,'d',node) + return tsk +@taskgen_method +def generate_header(self,filename): + try: + self.header_lst.append([filename,self.install_path]) + except AttributeError: + self.header_lst=[[filename,self.install_path]] +@feature('d') +def process_header(self): + for i in getattr(self,'header_lst',[]): + node=self.path.find_resource(i[0]) + if not node: + raise Errors.WafError('file %r not found on d obj'%i[0]) + self.create_task('d_header',node,node.change_ext('.di')) diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/d_config.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/d_config.py new file mode 100644 index 0000000..50660ea --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/d_config.py @@ -0,0 +1,52 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib import Utils +from waflib.Configure import conf +@conf +def d_platform_flags(self): + v=self.env + if not v.DEST_OS: + v.DEST_OS=Utils.unversioned_sys_platform() + binfmt=Utils.destos_to_binfmt(self.env.DEST_OS) + if binfmt=='pe': + v['dprogram_PATTERN']='%s.exe' + v['dshlib_PATTERN']='lib%s.dll' + v['dstlib_PATTERN']='lib%s.a' + elif binfmt=='mac-o': + v['dprogram_PATTERN']='%s' + v['dshlib_PATTERN']='lib%s.dylib' + v['dstlib_PATTERN']='lib%s.a' + else: + v['dprogram_PATTERN']='%s' + v['dshlib_PATTERN']='lib%s.so' + v['dstlib_PATTERN']='lib%s.a' +DLIB=''' +version(D_Version2) { + import std.stdio; + int main() { + writefln("phobos2"); + return 0; + } +} else { + version(Tango) { + import tango.stdc.stdio; + int main() { + printf("tango"); + return 0; + } + } else { + import std.stdio; + int main() { + writefln("phobos1"); + return 0; + } + } +} +''' +@conf +def check_dlibrary(self,execute=True): + ret=self.check_cc(features='d dprogram',fragment=DLIB,compile_filename='test.d',execute=execute,define_ret=True) + if execute: + self.env.DLIBRARY=ret.strip() diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/d_scan.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/d_scan.py new file mode 100644 index 0000000..ee80c5f --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/d_scan.py @@ -0,0 +1,133 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import re +from waflib import Utils,Logs +def filter_comments(filename): + txt=Utils.readf(filename) + i=0 + buf=[] + max=len(txt) + begin=0 + while i<max: + c=txt[i] + if c=='"'or c=="'": + buf.append(txt[begin:i]) + delim=c + i+=1 + while i<max: + c=txt[i] + if c==delim:break + elif c=='\\': + i+=1 + i+=1 + i+=1 + begin=i + elif c=='/': + buf.append(txt[begin:i]) + i+=1 + if i==max:break + c=txt[i] + if c=='+': + i+=1 + nesting=1 + c=None + while i<max: + prev=c + c=txt[i] + if prev=='/'and c=='+': + nesting+=1 + c=None + elif prev=='+'and c=='/': + nesting-=1 + if nesting==0:break + c=None + i+=1 + elif c=='*': + i+=1 + c=None + while i<max: + prev=c + c=txt[i] + if prev=='*'and c=='/':break + i+=1 + elif c=='/': + i+=1 + while i<max and txt[i]!='\n': + i+=1 + else: + begin=i-1 + continue + i+=1 + begin=i + buf.append(' ') + else: + i+=1 + buf.append(txt[begin:]) + return buf +class d_parser(object): + def __init__(self,env,incpaths): + self.allnames=[] + self.re_module=re.compile("module\s+([^;]+)") + self.re_import=re.compile("import\s+([^;]+)") + self.re_import_bindings=re.compile("([^:]+):(.*)") + self.re_import_alias=re.compile("[^=]+=(.+)") + self.env=env + self.nodes=[] + self.names=[] + self.incpaths=incpaths + def tryfind(self,filename): + found=0 + for n in self.incpaths: + found=n.find_resource(filename.replace('.','/')+'.d') + if found: + self.nodes.append(found) + self.waiting.append(found) + break + if not found: + if not filename in self.names: + self.names.append(filename) + def get_strings(self,code): + self.module='' + lst=[] + mod_name=self.re_module.search(code) + if mod_name: + self.module=re.sub('\s+','',mod_name.group(1)) + import_iterator=self.re_import.finditer(code) + if import_iterator: + for import_match in import_iterator: + import_match_str=re.sub('\s+','',import_match.group(1)) + bindings_match=self.re_import_bindings.match(import_match_str) + if bindings_match: + 
import_match_str=bindings_match.group(1) + matches=import_match_str.split(',') + for match in matches: + alias_match=self.re_import_alias.match(match) + if alias_match: + match=alias_match.group(1) + lst.append(match) + return lst + def start(self,node): + self.waiting=[node] + while self.waiting: + nd=self.waiting.pop(0) + self.iter(nd) + def iter(self,node): + path=node.abspath() + code="".join(filter_comments(path)) + names=self.get_strings(code) + for x in names: + if x in self.allnames:continue + self.allnames.append(x) + self.tryfind(x) +def scan(self): + env=self.env + gruik=d_parser(env,self.generator.includes_nodes) + node=self.inputs[0] + gruik.start(node) + nodes=gruik.nodes + names=gruik.names + if Logs.verbose: + Logs.debug('deps: deps for %s: %r; unresolved %r'%(str(node),nodes,names)) + return(nodes,names) diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/dbus.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/dbus.py new file mode 100644 index 0000000..ccea278 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/dbus.py @@ -0,0 +1,29 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib import Task,Errors +from waflib.TaskGen import taskgen_method,before_method +@taskgen_method +def add_dbus_file(self,filename,prefix,mode): + if not hasattr(self,'dbus_lst'): + self.dbus_lst=[] + if not'process_dbus'in self.meths: + self.meths.append('process_dbus') + self.dbus_lst.append([filename,prefix,mode]) +@before_method('apply_core') +def process_dbus(self): + for filename,prefix,mode in getattr(self,'dbus_lst',[]): + node=self.path.find_resource(filename) + if not node: + raise Errors.WafError('file not found '+filename) + tsk=self.create_task('dbus_binding_tool',node,node.change_ext('.h')) + tsk.env.DBUS_BINDING_TOOL_PREFIX=prefix + tsk.env.DBUS_BINDING_TOOL_MODE=mode +class dbus_binding_tool(Task.Task): + color='BLUE' + ext_out=['.h'] + run_str='${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}' + shell=True +def configure(conf): + dbus_binding_tool=conf.find_program('dbus-binding-tool',var='DBUS_BINDING_TOOL') diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/dmd.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/dmd.py new file mode 100644 index 0000000..b6e3303 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/dmd.py @@ -0,0 +1,51 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
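Usage note (illustrative only): dbus.py above exposes add_dbus_file() on task generators and a dbus_binding_tool task. A sketch of the expected pattern; the XML file, prefix and mode are placeholders.

def configure(conf):
    conf.load('dbus')            # locates dbus-binding-tool

def build(bld):
    tg = bld(features='c cprogram', source='main.c', target='app')
    # generates interface.h from interface.xml before compilation
    tg.add_dbus_file('interface.xml', 'test_prefix', 'glib-server')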
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import sys +from waflib.Tools import ar,d +from waflib.Configure import conf +@conf +def find_dmd(conf): + conf.find_program(['dmd','dmd2','ldc'],var='D') + out=conf.cmd_and_log([conf.env.D,'--help']) + if out.find("D Compiler v")==-1: + out=conf.cmd_and_log([conf.env.D,'-version']) + if out.find("based on DMD v1.")==-1: + conf.fatal("detected compiler is not dmd/ldc") +@conf +def common_flags_ldc(conf): + v=conf.env + v['DFLAGS']=['-d-version=Posix'] + v['LINKFLAGS']=[] + v['DFLAGS_dshlib']=['-relocation-model=pic'] +@conf +def common_flags_dmd(conf): + v=conf.env + v['D_SRC_F']=['-c'] + v['D_TGT_F']='-of%s' + v['D_LINKER']=v['D'] + v['DLNK_SRC_F']='' + v['DLNK_TGT_F']='-of%s' + v['DINC_ST']='-I%s' + v['DSHLIB_MARKER']=v['DSTLIB_MARKER']='' + v['DSTLIB_ST']=v['DSHLIB_ST']='-L-l%s' + v['DSTLIBPATH_ST']=v['DLIBPATH_ST']='-L-L%s' + v['LINKFLAGS_dprogram']=['-quiet'] + v['DFLAGS_dshlib']=['-fPIC'] + v['LINKFLAGS_dshlib']=['-L-shared'] + v['DHEADER_ext']='.di' + v.DFLAGS_d_with_header=['-H','-Hf'] + v['D_HDR_F']='%s' +def configure(conf): + conf.find_dmd() + if sys.platform=='win32': + out=conf.cmd_and_log([conf.env.D,'--help']) + if out.find("D Compiler v2.")>-1: + conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead') + conf.load('ar') + conf.load('d') + conf.common_flags_dmd() + conf.d_platform_flags() + if str(conf.env.D).find('ldc')>-1: + conf.common_flags_ldc() diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/errcheck.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/errcheck.py new file mode 100644 index 0000000..3b06493 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/errcheck.py @@ -0,0 +1,161 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +typos={'feature':'features','sources':'source','targets':'target','include':'includes','export_include':'export_includes','define':'defines','importpath':'includes','installpath':'install_path','iscopy':'is_copy',} +meths_typos=['__call__','program','shlib','stlib','objects'] +from waflib import Logs,Build,Node,Task,TaskGen,ConfigSet,Errors,Utils +import waflib.Tools.ccroot +def check_same_targets(self): + mp=Utils.defaultdict(list) + uids={} + def check_task(tsk): + if not isinstance(tsk,Task.Task): + return + for node in tsk.outputs: + mp[node].append(tsk) + try: + uids[tsk.uid()].append(tsk) + except KeyError: + uids[tsk.uid()]=[tsk] + for g in self.groups: + for tg in g: + try: + for tsk in tg.tasks: + check_task(tsk) + except AttributeError: + check_task(tg) + dupe=False + for(k,v)in mp.items(): + if len(v)>1: + dupe=True + msg='* Node %r is created more than once%s. The task generators are:'%(k,Logs.verbose==1 and" (full message on 'waf -v -v')"or"") + Logs.error(msg) + for x in v: + if Logs.verbose>1: + Logs.error(' %d. %r'%(1+v.index(x),x.generator)) + else: + Logs.error(' %d. %r in %r'%(1+v.index(x),x.generator.name,getattr(x.generator,'path',None))) + if not dupe: + for(k,v)in uids.items(): + if len(v)>1: + Logs.error('* Several tasks use the same identifier. 
Please check the information on\n http://docs.waf.googlecode.com/git/apidocs_16/Task.html#waflib.Task.Task.uid') + for tsk in v: + Logs.error(' - object %r (%r) defined in %r'%(tsk.__class__.__name__,tsk,tsk.generator)) +def check_invalid_constraints(self): + feat=set([]) + for x in list(TaskGen.feats.values()): + feat.union(set(x)) + for(x,y)in TaskGen.task_gen.prec.items(): + feat.add(x) + feat.union(set(y)) + ext=set([]) + for x in TaskGen.task_gen.mappings.values(): + ext.add(x.__name__) + invalid=ext&feat + if invalid: + Logs.error('The methods %r have invalid annotations: @extension <-> @feature/@before_method/@after_method'%list(invalid)) + for cls in list(Task.classes.values()): + for x in('before','after'): + for y in Utils.to_list(getattr(cls,x,[])): + if not Task.classes.get(y,None): + Logs.error('Erroneous order constraint %r=%r on task class %r'%(x,y,cls.__name__)) + if getattr(cls,'rule',None): + Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")'%cls.__name__) +def replace(m): + oldcall=getattr(Build.BuildContext,m) + def call(self,*k,**kw): + ret=oldcall(self,*k,**kw) + for x in typos: + if x in kw: + if x=='iscopy'and'subst'in getattr(self,'features',''): + continue + err=True + Logs.error('Fix the typo %r -> %r on %r'%(x,typos[x],ret)) + return ret + setattr(Build.BuildContext,m,call) +def enhance_lib(): + for m in meths_typos: + replace(m) + def ant_glob(self,*k,**kw): + if k: + lst=Utils.to_list(k[0]) + for pat in lst: + if'..'in pat.split('/'): + Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'"%k[0]) + if kw.get('remove',True): + try: + if self.is_child_of(self.ctx.bldnode)and not kw.get('quiet',False): + Logs.error('Using ant_glob on the build folder (%r) is dangerous (quiet=True to disable this warning)'%self) + except AttributeError: + pass + return self.old_ant_glob(*k,**kw) + Node.Node.old_ant_glob=Node.Node.ant_glob + Node.Node.ant_glob=ant_glob + old=Task.is_before + def is_before(t1,t2): + ret=old(t1,t2) + if ret and old(t2,t1): + Logs.error('Contradictory order constraints in classes %r %r'%(t1,t2)) + return ret + Task.is_before=is_before + def check_err_features(self): + lst=self.to_list(self.features) + if'shlib'in lst: + Logs.error('feature shlib -> cshlib, dshlib or cxxshlib') + for x in('c','cxx','d','fc'): + if not x in lst and lst and lst[0]in[x+y for y in('program','shlib','stlib')]: + Logs.error('%r features is probably missing %r'%(self,x)) + TaskGen.feature('*')(check_err_features) + def check_err_order(self): + if not hasattr(self,'rule')and not'subst'in Utils.to_list(self.features): + for x in('before','after','ext_in','ext_out'): + if hasattr(self,x): + Logs.warn('Erroneous order constraint %r on non-rule based task generator %r'%(x,self)) + else: + for x in('before','after'): + for y in self.to_list(getattr(self,x,[])): + if not Task.classes.get(y,None): + Logs.error('Erroneous order constraint %s=%r on %r (no such class)'%(x,y,self)) + TaskGen.feature('*')(check_err_order) + def check_compile(self): + check_invalid_constraints(self) + try: + ret=self.orig_compile() + finally: + check_same_targets(self) + return ret + Build.BuildContext.orig_compile=Build.BuildContext.compile + Build.BuildContext.compile=check_compile + def use_rec(self,name,**kw): + try: + y=self.bld.get_tgen_by_name(name) + except Errors.WafError: + pass + else: + idx=self.bld.get_group_idx(self) + odx=self.bld.get_group_idx(y) + if odx>idx: + msg="Invalid 'use' across build groups:" + if Logs.verbose>1: + msg+='\n 
target %r\n uses:\n %r'%(self,y) + else: + msg+=" %r uses %r (try 'waf -v -v' for the full error)"%(self.name,name) + raise Errors.WafError(msg) + self.orig_use_rec(name,**kw) + TaskGen.task_gen.orig_use_rec=TaskGen.task_gen.use_rec + TaskGen.task_gen.use_rec=use_rec + def getattri(self,name,default=None): + if name=='append'or name=='add': + raise Errors.WafError('env.append and env.add do not exist: use env.append_value/env.append_unique') + elif name=='prepend': + raise Errors.WafError('env.prepend does not exist: use env.prepend_value') + if name in self.__slots__: + return object.__getattr__(self,name,default) + else: + return self[name] + ConfigSet.ConfigSet.__getattr__=getattri +def options(opt): + enhance_lib() +def configure(conf): + pass diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/fc.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/fc.py new file mode 100644 index 0000000..3589799 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/fc.py @@ -0,0 +1,116 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import re +from waflib import Utils,Task,TaskGen,Logs +from waflib.Tools import ccroot,fc_config,fc_scan +from waflib.TaskGen import feature,before_method,after_method,extension +from waflib.Configure import conf +ccroot.USELIB_VARS['fc']=set(['FCFLAGS','DEFINES','INCLUDES']) +ccroot.USELIB_VARS['fcprogram_test']=ccroot.USELIB_VARS['fcprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) +ccroot.USELIB_VARS['fcshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) +ccroot.USELIB_VARS['fcstlib']=set(['ARFLAGS','LINKDEPS']) +@feature('fcprogram','fcshlib','fcstlib','fcprogram_test') +def dummy(self): + pass +@extension('.f','.f90','.F','.F90','.for','.FOR') +def fc_hook(self,node): + return self.create_compiled_task('fc',node) +@conf +def modfile(conf,name): + return{'lower':name.lower()+'.mod','lower.MOD':name.upper()+'.MOD','UPPER.mod':name.upper()+'.mod','UPPER':name.upper()+'.MOD'}[conf.env.FC_MOD_CAPITALIZATION or'lower'] +def get_fortran_tasks(tsk): + bld=tsk.generator.bld + tasks=bld.get_tasks_group(bld.get_group_idx(tsk.generator)) + return[x for x in tasks if isinstance(x,fc)and not getattr(x,'nomod',None)and not getattr(x,'mod_fortran_done',None)] +class fc(Task.Task): + color='GREEN' + run_str='${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()}' + vars=["FORTRANMODPATHFLAG"] + def scan(self): + tmp=fc_scan.fortran_parser(self.generator.includes_nodes) + tmp.task=self + tmp.start(self.inputs[0]) + if Logs.verbose: + Logs.debug('deps: deps for %r: %r; unresolved %r'%(self.inputs,tmp.nodes,tmp.names)) + return(tmp.nodes,tmp.names) + def runnable_status(self): + if getattr(self,'mod_fortran_done',None): + return super(fc,self).runnable_status() + bld=self.generator.bld + lst=get_fortran_tasks(self) + for tsk in lst: + tsk.mod_fortran_done=True + for tsk in lst: + ret=tsk.runnable_status() + if ret==Task.ASK_LATER: + for x in lst: + x.mod_fortran_done=None + return Task.ASK_LATER + ins=Utils.defaultdict(set) + outs=Utils.defaultdict(set) + for tsk in lst: + key=tsk.uid() + for x in bld.raw_deps[key]: + if x.startswith('MOD@'): + name=bld.modfile(x.replace('MOD@','')) + node=bld.srcnode.find_or_declare(name) + tsk.set_outputs(node) + outs[id(node)].add(tsk) + for 
tsk in lst: + key=tsk.uid() + for x in bld.raw_deps[key]: + if x.startswith('USE@'): + name=bld.modfile(x.replace('USE@','')) + node=bld.srcnode.find_resource(name) + if node and node not in tsk.outputs: + if not node in bld.node_deps[key]: + bld.node_deps[key].append(node) + ins[id(node)].add(tsk) + for k in ins.keys(): + for a in ins[k]: + a.run_after.update(outs[k]) + tmp=[] + for t in outs[k]: + tmp.extend(t.outputs) + a.dep_nodes.extend(tmp) + a.dep_nodes.sort(key=lambda x:x.abspath()) + for tsk in lst: + try: + delattr(tsk,'cache_sig') + except AttributeError: + pass + return super(fc,self).runnable_status() +class fcprogram(ccroot.link_task): + color='YELLOW' + run_str='${FC} ${LINKFLAGS} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB}' + inst_to='${BINDIR}' +class fcshlib(fcprogram): + inst_to='${LIBDIR}' +class fcprogram_test(fcprogram): + def can_retrieve_cache(self): + return False + def runnable_status(self): + ret=super(fcprogram_test,self).runnable_status() + if ret==Task.SKIP_ME: + ret=Task.RUN_ME + return ret + def exec_command(self,cmd,**kw): + bld=self.generator.bld + kw['shell']=isinstance(cmd,str) + kw['stdout']=kw['stderr']=Utils.subprocess.PIPE + kw['cwd']=bld.variant_dir + bld.out=bld.err='' + bld.to_log('command: %s\n'%cmd) + kw['output']=0 + try: + (bld.out,bld.err)=bld.cmd_and_log(cmd,**kw) + except Exception ,e: + return-1 + if bld.out: + bld.to_log("out: %s\n"%bld.out) + if bld.err: + bld.to_log("err: %s\n"%bld.err) +class fcstlib(ccroot.stlink_task): + pass diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/fc_config.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/fc_config.py new file mode 100644 index 0000000..580eac7 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/fc_config.py @@ -0,0 +1,285 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
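Usage note (illustrative only): fc.py above maps .f/.f90/.for sources onto fc compile tasks and defines the fcprogram/fcshlib/fcstlib link tasks. A minimal Fortran wscript sketch; file names are placeholders.

def options(opt):
    opt.load('compiler_fc')

def configure(conf):
    conf.load('compiler_fc')     # picks gfortran/g95/ifort depending on the platform

def build(bld):
    bld(features='fc fcprogram', source='hello.f90', target='hello')
    bld(features='fc fcstlib', source='utils.f90', target='utils')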
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import re,shutil,os,sys,string,shlex +from waflib.Configure import conf +from waflib.TaskGen import feature,after_method,before_method +from waflib import Build,Utils +FC_FRAGMENT=' program main\n end program main\n' +FC_FRAGMENT2=' PROGRAM MAIN\n END\n' +@conf +def fc_flags(conf): + v=conf.env + v['FC_SRC_F']=[] + v['FC_TGT_F']=['-c','-o'] + v['FCINCPATH_ST']='-I%s' + v['FCDEFINES_ST']='-D%s' + if not v['LINK_FC']:v['LINK_FC']=v['FC'] + v['FCLNK_SRC_F']=[] + v['FCLNK_TGT_F']=['-o'] + v['FCFLAGS_fcshlib']=['-fpic'] + v['LINKFLAGS_fcshlib']=['-shared'] + v['fcshlib_PATTERN']='lib%s.so' + v['fcstlib_PATTERN']='lib%s.a' + v['FCLIB_ST']='-l%s' + v['FCLIBPATH_ST']='-L%s' + v['FCSTLIB_ST']='-l%s' + v['FCSTLIBPATH_ST']='-L%s' + v['FCSTLIB_MARKER']='-Wl,-Bstatic' + v['FCSHLIB_MARKER']='-Wl,-Bdynamic' + v['SONAME_ST']='-Wl,-h,%s' +@conf +def fc_add_flags(conf): + conf.add_os_flags('FCFLAGS') + conf.add_os_flags('LDFLAGS','LINKFLAGS') +@conf +def check_fortran(self,*k,**kw): + self.check_cc(fragment=FC_FRAGMENT,compile_filename='test.f',features='fc fcprogram',msg='Compiling a simple fortran app') +@conf +def check_fc(self,*k,**kw): + kw['compiler']='fc' + if not'compile_mode'in kw: + kw['compile_mode']='fc' + if not'type'in kw: + kw['type']='fcprogram' + if not'compile_filename'in kw: + kw['compile_filename']='test.f90' + if not'code'in kw: + kw['code']=FC_FRAGMENT + return self.check(*k,**kw) +@conf +def fortran_modifier_darwin(conf): + v=conf.env + v['FCFLAGS_fcshlib']=['-fPIC'] + v['LINKFLAGS_fcshlib']=['-dynamiclib','-Wl,-compatibility_version,1','-Wl,-current_version,1'] + v['fcshlib_PATTERN']='lib%s.dylib' + v['FRAMEWORKPATH_ST']='-F%s' + v['FRAMEWORK_ST']='-framework %s' + v['LINKFLAGS_fcstlib']=[] + v['FCSHLIB_MARKER']='' + v['FCSTLIB_MARKER']='' + v['SONAME_ST']='' +@conf +def fortran_modifier_win32(conf): + v=conf.env + v['fcprogram_PATTERN']=v['fcprogram_test_PATTERN']='%s.exe' + v['fcshlib_PATTERN']='%s.dll' + v['implib_PATTERN']='lib%s.dll.a' + v['IMPLIB_ST']='-Wl,--out-implib,%s' + v['FCFLAGS_fcshlib']=[] + v.append_value('FCFLAGS_fcshlib',['-DDLL_EXPORT']) + v.append_value('LINKFLAGS',['-Wl,--enable-auto-import']) +@conf +def fortran_modifier_cygwin(conf): + fortran_modifier_win32(conf) + v=conf.env + v['fcshlib_PATTERN']='cyg%s.dll' + v.append_value('LINKFLAGS_fcshlib',['-Wl,--enable-auto-image-base']) + v['FCFLAGS_fcshlib']=[] +@conf +def check_fortran_dummy_main(self,*k,**kw): + if not self.env.CC: + self.fatal('A c compiler is required for check_fortran_dummy_main') + lst=['MAIN__','__MAIN','_MAIN','MAIN_','MAIN'] + lst.extend([m.lower()for m in lst]) + lst.append('') + self.start_msg('Detecting whether we need a dummy main') + for main in lst: + kw['fortran_main']=main + try: + self.check_cc(fragment='int %s() { return 0; }\n'%(main or'test'),features='c fcprogram',mandatory=True) + if not main: + self.env.FC_MAIN=-1 + self.end_msg('no') + else: + self.env.FC_MAIN=main + self.end_msg('yes %s'%main) + break + except self.errors.ConfigurationError: + pass + else: + self.end_msg('not found') + self.fatal('could not detect whether fortran requires a dummy main, see the config.log') +GCC_DRIVER_LINE=re.compile('^Driving:') +POSIX_STATIC_EXT=re.compile('\S+\.a') +POSIX_LIB_FLAGS=re.compile('-l\S+') +@conf +def is_link_verbose(self,txt): + assert isinstance(txt,str) + for line in txt.splitlines(): + if not GCC_DRIVER_LINE.search(line): + if POSIX_STATIC_EXT.search(line)or POSIX_LIB_FLAGS.search(line): + 
return True + return False +@conf +def check_fortran_verbose_flag(self,*k,**kw): + self.start_msg('fortran link verbose flag') + for x in['-v','--verbose','-verbose','-V']: + try: + self.check_cc(features='fc fcprogram_test',fragment=FC_FRAGMENT2,compile_filename='test.f',linkflags=[x],mandatory=True) + except self.errors.ConfigurationError: + pass + else: + if self.is_link_verbose(self.test_bld.err)or self.is_link_verbose(self.test_bld.out): + self.end_msg(x) + break + else: + self.end_msg('failure') + self.fatal('Could not obtain the fortran link verbose flag (see config.log)') + self.env.FC_VERBOSE_FLAG=x + return x +LINKFLAGS_IGNORED=[r'-lang*',r'-lcrt[a-zA-Z0-9\.]*\.o',r'-lc$',r'-lSystem',r'-libmil',r'-LIST:*',r'-LNO:*'] +if os.name=='nt': + LINKFLAGS_IGNORED.extend([r'-lfrt*',r'-luser32',r'-lkernel32',r'-ladvapi32',r'-lmsvcrt',r'-lshell32',r'-lmingw',r'-lmoldname']) +else: + LINKFLAGS_IGNORED.append(r'-lgcc*') +RLINKFLAGS_IGNORED=[re.compile(f)for f in LINKFLAGS_IGNORED] +def _match_ignore(line): + for i in RLINKFLAGS_IGNORED: + if i.match(line): + return True + return False +def parse_fortran_link(lines): + final_flags=[] + for line in lines: + if not GCC_DRIVER_LINE.match(line): + _parse_flink_line(line,final_flags) + return final_flags +SPACE_OPTS=re.compile('^-[LRuYz]$') +NOSPACE_OPTS=re.compile('^-[RL]') +def _parse_flink_line(line,final_flags): + lexer=shlex.shlex(line,posix=True) + lexer.whitespace_split=True + t=lexer.get_token() + tmp_flags=[] + while t: + def parse(token): + if _match_ignore(token): + pass + elif token.startswith('-lkernel32')and sys.platform=='cygwin': + tmp_flags.append(token) + elif SPACE_OPTS.match(token): + t=lexer.get_token() + if t.startswith('P,'): + t=t[2:] + for opt in t.split(os.pathsep): + tmp_flags.append('-L%s'%opt) + elif NOSPACE_OPTS.match(token): + tmp_flags.append(token) + elif POSIX_LIB_FLAGS.match(token): + tmp_flags.append(token) + else: + pass + t=lexer.get_token() + return t + t=parse(t) + final_flags.extend(tmp_flags) + return final_flags +@conf +def check_fortran_clib(self,autoadd=True,*k,**kw): + if not self.env.FC_VERBOSE_FLAG: + self.fatal('env.FC_VERBOSE_FLAG is not set: execute check_fortran_verbose_flag?') + self.start_msg('Getting fortran runtime link flags') + try: + self.check_cc(fragment=FC_FRAGMENT2,compile_filename='test.f',features='fc fcprogram_test',linkflags=[self.env.FC_VERBOSE_FLAG]) + except Exception: + self.end_msg(False) + if kw.get('mandatory',True): + conf.fatal('Could not find the c library flags') + else: + out=self.test_bld.err + flags=parse_fortran_link(out.splitlines()) + self.end_msg('ok (%s)'%' '.join(flags)) + self.env.LINKFLAGS_CLIB=flags + return flags + return[] +def getoutput(conf,cmd,stdin=False): + if stdin: + stdin=Utils.subprocess.PIPE + else: + stdin=None + env=conf.env.env or None + try: + p=Utils.subprocess.Popen(cmd,stdin=stdin,stdout=Utils.subprocess.PIPE,stderr=Utils.subprocess.PIPE,env=env) + if stdin: + p.stdin.write('\n') + out,err=p.communicate() + except Exception: + conf.fatal('could not determine the compiler version %r'%cmd) + if not isinstance(out,str): + out=out.decode(sys.stdout.encoding or'iso8859-1') + if not isinstance(err,str): + err=err.decode(sys.stdout.encoding or'iso8859-1') + return(out,err) +ROUTINES_CODE="""\ + subroutine foobar() + return + end + subroutine foo_bar() + return + end +""" +MAIN_CODE=""" +void %(dummy_func_nounder)s(void); +void %(dummy_func_under)s(void); +int %(main_func_name)s() { + %(dummy_func_nounder)s(); + %(dummy_func_under)s(); + return 0; +} 
+""" +@feature('link_main_routines_func') +@before_method('process_source') +def link_main_routines_tg_method(self): + def write_test_file(task): + task.outputs[0].write(task.generator.code) + bld=self.bld + bld(rule=write_test_file,target='main.c',code=MAIN_CODE%self.__dict__) + bld(rule=write_test_file,target='test.f',code=ROUTINES_CODE) + bld(features='fc fcstlib',source='test.f',target='test') + bld(features='c fcprogram',source='main.c',target='app',use='test') +def mangling_schemes(): + for u in['_','']: + for du in['','_']: + for c in["lower","upper"]: + yield(u,du,c) +def mangle_name(u,du,c,name): + return getattr(name,c)()+u+(name.find('_')!=-1 and du or'') +@conf +def check_fortran_mangling(self,*k,**kw): + if not self.env.CC: + self.fatal('A c compiler is required for link_main_routines') + if not self.env.FC: + self.fatal('A fortran compiler is required for link_main_routines') + if not self.env.FC_MAIN: + self.fatal('Checking for mangling requires self.env.FC_MAIN (execute "check_fortran_dummy_main" first?)') + self.start_msg('Getting fortran mangling scheme') + for(u,du,c)in mangling_schemes(): + try: + self.check_cc(compile_filename=[],features='link_main_routines_func',msg='nomsg',errmsg='nomsg',mandatory=True,dummy_func_nounder=mangle_name(u,du,c,"foobar"),dummy_func_under=mangle_name(u,du,c,"foo_bar"),main_func_name=self.env.FC_MAIN) + except self.errors.ConfigurationError: + pass + else: + self.end_msg("ok ('%s', '%s', '%s-case')"%(u,du,c)) + self.env.FORTRAN_MANGLING=(u,du,c) + break + else: + self.end_msg(False) + self.fatal('mangler not found') + return(u,du,c) +@feature('pyext') +@before_method('propagate_uselib_vars','apply_link') +def set_lib_pat(self): + self.env['fcshlib_PATTERN']=self.env['pyext_PATTERN'] +@conf +def detect_openmp(self): + for x in['-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp']: + try: + self.check_fc(msg='Checking for OpenMP flag %s'%x,fragment='program main\n call omp_get_num_threads()\nend program main',fcflags=x,linkflags=x,uselib_store='OPENMP') + except self.errors.ConfigurationError: + pass + else: + break + else: + self.fatal('Could not find OpenMP') diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/fc_scan.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/fc_scan.py new file mode 100644 index 0000000..e4e2344 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/fc_scan.py @@ -0,0 +1,68 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
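check_fortran_mangling above links a tiny C main against the two Fortran routines in ROUTINES_CODE, trying every combination produced by mangling_schemes()/mangle_name() until one resolves. A standalone sketch of the candidate symbol names it enumerates, assuming only what mangle_name shows:

def mangling_candidates(name):
    # Illustrative sketch, not waf code: same enumeration as mangle_name() above.
    for u in ('_', ''):          # trailing underscore or none
        for du in ('', '_'):     # extra underscore when the name already has one
            for case in ('lower', 'upper'):
                extra = du if '_' in name else ''
                yield getattr(name, case)() + u + extra

print(sorted(set(mangling_candidates('foo_bar'))))
# ['FOO_BAR', 'FOO_BAR_', 'FOO_BAR__', 'foo_bar', 'foo_bar_', 'foo_bar__']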
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import re +from waflib import Utils,Task,TaskGen,Logs +from waflib.TaskGen import feature,before_method,after_method,extension +from waflib.Configure import conf +INC_REGEX="""(?:^|['">]\s*;)\s*(?:|#\s*)INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])""" +USE_REGEX="""(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)""" +MOD_REGEX="""(?:^|;)\s*MODULE(?!\s*PROCEDURE)(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)""" +re_inc=re.compile(INC_REGEX,re.I) +re_use=re.compile(USE_REGEX,re.I) +re_mod=re.compile(MOD_REGEX,re.I) +class fortran_parser(object): + def __init__(self,incpaths): + self.seen=[] + self.nodes=[] + self.names=[] + self.incpaths=incpaths + def find_deps(self,node): + txt=node.read() + incs=[] + uses=[] + mods=[] + for line in txt.splitlines(): + m=re_inc.search(line) + if m: + incs.append(m.group(1)) + m=re_use.search(line) + if m: + uses.append(m.group(1)) + m=re_mod.search(line) + if m: + mods.append(m.group(1)) + return(incs,uses,mods) + def start(self,node): + self.waiting=[node] + while self.waiting: + nd=self.waiting.pop(0) + self.iter(nd) + def iter(self,node): + path=node.abspath() + incs,uses,mods=self.find_deps(node) + for x in incs: + if x in self.seen: + continue + self.seen.append(x) + self.tryfind_header(x) + for x in uses: + name="USE@%s"%x + if not name in self.names: + self.names.append(name) + for x in mods: + name="MOD@%s"%x + if not name in self.names: + self.names.append(name) + def tryfind_header(self,filename): + found=None + for n in self.incpaths: + found=n.find_resource(filename) + if found: + self.nodes.append(found) + self.waiting.append(found) + break + if not found: + if not filename in self.names: + self.names.append(filename) diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/flex.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/flex.py new file mode 100644 index 0000000..13f6207 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/flex.py @@ -0,0 +1,32 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import waflib.TaskGen,os,re +def decide_ext(self,node): + if'cxx'in self.features: + return['.lex.cc'] + return['.lex.c'] +def flexfun(tsk): + env=tsk.env + bld=tsk.generator.bld + wd=bld.variant_dir + def to_list(xx): + if isinstance(xx,str):return[xx] + return xx + tsk.last_cmd=lst=[] + lst.extend(to_list(env['FLEX'])) + lst.extend(to_list(env['FLEXFLAGS'])) + inputs=[a.path_from(bld.bldnode)for a in tsk.inputs] + if env.FLEX_MSYS: + inputs=[x.replace(os.sep,'/')for x in inputs] + lst.extend(inputs) + lst=[x for x in lst if x] + txt=bld.cmd_and_log(lst,cwd=wd,env=env.env or None,quiet=0) + tsk.outputs[0].write(txt.replace('\r\n','\n').replace('\r','\n')) +waflib.TaskGen.declare_chain(name='flex',rule=flexfun,ext_in='.l',decider=decide_ext,) +def configure(conf): + conf.find_program('flex',var='FLEX') + conf.env.FLEXFLAGS=['-t'] + if re.search(r"\\msys\\[0-9.]+\\bin\\flex.exe$",conf.env.FLEX): + conf.env.FLEX_MSYS=True diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/g95.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/g95.py new file mode 100644 index 0000000..9bc331a --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/g95.py @@ -0,0 +1,55 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
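fc_scan.py above discovers Fortran dependencies purely with regular expressions: MODULE definitions become MOD@ names, USE statements become USE@ names, and INCLUDE lines are resolved against the include paths. A much-simplified standalone sketch of that scan (the real USE_REGEX/MOD_REGEX also cope with ';' statement separators and 'use, intrinsic ::' forms):

import re

RE_USE = re.compile(r'^\s*use\s+(\w+)', re.I)
RE_MOD = re.compile(r'^\s*module\s+(?!procedure)(\w+)', re.I)

def scan_fortran(text):
    # Illustrative sketch, not waf code: return (used_modules, defined_modules).
    uses, mods = [], []
    for line in text.splitlines():
        m = RE_USE.match(line)
        if m:
            uses.append(m.group(1).lower())
        m = RE_MOD.match(line)
        if m:
            mods.append(m.group(1).lower())
    return uses, mods

src = "module shapes\ncontains\nend module shapes\n\nprogram main\n  use shapes\nend program main\n"
print(scan_fortran(src))   # (['shapes'], ['shapes'])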
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import re +from waflib import Utils +from waflib.Tools import fc,fc_config,fc_scan,ar +from waflib.Configure import conf +@conf +def find_g95(conf): + fc=conf.find_program('g95',var='FC') + fc=conf.cmd_to_list(fc) + conf.get_g95_version(fc) + conf.env.FC_NAME='G95' +@conf +def g95_flags(conf): + v=conf.env + v['FCFLAGS_fcshlib']=['-fPIC'] + v['FORTRANMODFLAG']=['-fmod=',''] + v['FCFLAGS_DEBUG']=['-Werror'] +@conf +def g95_modifier_win32(conf): + fc_config.fortran_modifier_win32(conf) +@conf +def g95_modifier_cygwin(conf): + fc_config.fortran_modifier_cygwin(conf) +@conf +def g95_modifier_darwin(conf): + fc_config.fortran_modifier_darwin(conf) +@conf +def g95_modifier_platform(conf): + dest_os=conf.env['DEST_OS']or Utils.unversioned_sys_platform() + g95_modifier_func=getattr(conf,'g95_modifier_'+dest_os,None) + if g95_modifier_func: + g95_modifier_func() +@conf +def get_g95_version(conf,fc): + version_re=re.compile(r"g95\s*(?P<major>\d*)\.(?P<minor>\d*)").search + cmd=fc+['--version'] + out,err=fc_config.getoutput(conf,cmd,stdin=False) + if out: + match=version_re(out) + else: + match=version_re(err) + if not match: + conf.fatal('cannot determine g95 version') + k=match.groupdict() + conf.env['FC_VERSION']=(k['major'],k['minor']) +def configure(conf): + conf.find_g95() + conf.find_ar() + conf.fc_flags() + conf.fc_add_flags() + conf.g95_flags() + conf.g95_modifier_platform() diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gas.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gas.py new file mode 100644 index 0000000..b714ca1 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gas.py @@ -0,0 +1,12 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import waflib.Tools.asm +from waflib.Tools import ar +def configure(conf): + conf.find_program(['gas','gcc'],var='AS') + conf.env.AS_TGT_F=['-c','-o'] + conf.env.ASLNK_TGT_F=['-o'] + conf.find_ar() + conf.load('asm') diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gcc.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gcc.py new file mode 100644 index 0000000..aca49ca --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gcc.py @@ -0,0 +1,98 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
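get_g95_version above (and the gfortran/ifort equivalents further down) follow the same recipe: run the compiler with --version and regex the banner from stdout or stderr. A standalone sketch of that probe, with subprocess standing in for fc_config.getoutput; the compiler name and regex are the g95 ones shown above:

import re
import subprocess

def probe_g95_version(compiler='g95'):
    # Illustrative sketch, not waf code: return ('major', 'minor') or None.
    try:
        proc = subprocess.Popen([compiler, '--version'],
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = proc.communicate()
    except OSError:
        return None
    banner = (out or err).decode('iso8859-1', 'replace')
    m = re.search(r'g95\s*(\d+)\.(\d+)', banner)
    return m.groups() if m else None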
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_gcc(conf): + cc=conf.find_program(['gcc','cc'],var='CC') + cc=conf.cmd_to_list(cc) + conf.get_cc_version(cc,gcc=True) + conf.env.CC_NAME='gcc' + conf.env.CC=cc +@conf +def gcc_common_flags(conf): + v=conf.env + v['CC_SRC_F']=[] + v['CC_TGT_F']=['-c','-o'] + if not v['LINK_CC']:v['LINK_CC']=v['CC'] + v['CCLNK_SRC_F']=[] + v['CCLNK_TGT_F']=['-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['RPATH_ST']='-Wl,-rpath,%s' + v['SONAME_ST']='-Wl,-h,%s' + v['SHLIB_MARKER']='-Wl,-Bdynamic' + v['STLIB_MARKER']='-Wl,-Bstatic' + v['cprogram_PATTERN']='%s' + v['CFLAGS_cshlib']=['-fPIC'] + v['LINKFLAGS_cshlib']=['-shared'] + v['cshlib_PATTERN']='lib%s.so' + v['LINKFLAGS_cstlib']=['-Wl,-Bstatic'] + v['cstlib_PATTERN']='lib%s.a' + v['LINKFLAGS_MACBUNDLE']=['-bundle','-undefined','dynamic_lookup'] + v['CFLAGS_MACBUNDLE']=['-fPIC'] + v['macbundle_PATTERN']='%s.bundle' +@conf +def gcc_modifier_win32(conf): + v=conf.env + v['cprogram_PATTERN']='%s.exe' + v['cshlib_PATTERN']='%s.dll' + v['implib_PATTERN']='lib%s.dll.a' + v['IMPLIB_ST']='-Wl,--out-implib,%s' + v['CFLAGS_cshlib']=[] + v.append_value('LINKFLAGS',['-Wl,--enable-auto-import']) +@conf +def gcc_modifier_cygwin(conf): + gcc_modifier_win32(conf) + v=conf.env + v['cshlib_PATTERN']='cyg%s.dll' + v.append_value('LINKFLAGS_cshlib',['-Wl,--enable-auto-image-base']) + v['CFLAGS_cshlib']=[] +@conf +def gcc_modifier_darwin(conf): + v=conf.env + v['CFLAGS_cshlib']=['-fPIC'] + v['LINKFLAGS_cshlib']=['-dynamiclib','-Wl,-compatibility_version,1','-Wl,-current_version,1'] + v['cshlib_PATTERN']='lib%s.dylib' + v['FRAMEWORKPATH_ST']='-F%s' + v['FRAMEWORK_ST']=['-framework'] + v['ARCH_ST']=['-arch'] + v['LINKFLAGS_cstlib']=[] + v['SHLIB_MARKER']=[] + v['STLIB_MARKER']=[] + v['SONAME_ST']=[] +@conf +def gcc_modifier_aix(conf): + v=conf.env + v['LINKFLAGS_cprogram']=['-Wl,-brtl'] + v['LINKFLAGS_cshlib']=['-shared','-Wl,-brtl,-bexpfull'] + v['SHLIB_MARKER']=[] +@conf +def gcc_modifier_hpux(conf): + v=conf.env + v['SHLIB_MARKER']=[] + v['STLIB_MARKER']='-Bstatic' + v['CFLAGS_cshlib']=['-fPIC','-DPIC'] + v['cshlib_PATTERN']='lib%s.sl' +@conf +def gcc_modifier_openbsd(conf): + conf.env.SONAME_ST=[] +@conf +def gcc_modifier_platform(conf): + gcc_modifier_func=getattr(conf,'gcc_modifier_'+conf.env.DEST_OS,None) + if gcc_modifier_func: + gcc_modifier_func() +def configure(conf): + conf.find_gcc() + conf.find_ar() + conf.gcc_common_flags() + conf.gcc_modifier_platform() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gcc.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gcc.pyc Binary files differnew file mode 100644 index 0000000..86f9ce5 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gcc.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gdc.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gdc.py new file mode 100644 index 0000000..da966ec --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gdc.py @@ -0,0 +1,36 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
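gcc_modifier_platform() above (and its gxx/g95/gfortran twins) select per-OS tweaks by building a function name from DEST_OS and looking it up with getattr, so an unknown platform simply gets no modifier. The same dispatch-by-name pattern in a self-contained sketch; function and key names here are examples, not waf's:

import sys

def modifier_darwin(env):
    env['cshlib_PATTERN'] = 'lib%s.dylib'

def modifier_win32(env):
    env['cprogram_PATTERN'] = '%s.exe'

def apply_platform_modifiers(env, dest_os=None):
    # Illustrative sketch, not waf code: look the modifier up by string,
    # fall through silently when no function with that name exists.
    dest_os = dest_os or sys.platform
    func = globals().get('modifier_' + dest_os)
    if func:
        func(env)
    return env

print(apply_platform_modifiers({}, 'darwin'))   # {'cshlib_PATTERN': 'lib%s.dylib'}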
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import sys +from waflib.Tools import ar,d +from waflib.Configure import conf +@conf +def find_gdc(conf): + conf.find_program('gdc',var='D') + out=conf.cmd_and_log([conf.env.D,'--version']) + if out.find("gdc ")==-1: + conf.fatal("detected compiler is not gdc") +@conf +def common_flags_gdc(conf): + v=conf.env + v['DFLAGS']=[] + v['D_SRC_F']=['-c'] + v['D_TGT_F']='-o%s' + v['D_LINKER']=v['D'] + v['DLNK_SRC_F']='' + v['DLNK_TGT_F']='-o%s' + v['DINC_ST']='-I%s' + v['DSHLIB_MARKER']=v['DSTLIB_MARKER']='' + v['DSTLIB_ST']=v['DSHLIB_ST']='-l%s' + v['DSTLIBPATH_ST']=v['DLIBPATH_ST']='-L%s' + v['LINKFLAGS_dshlib']=['-shared'] + v['DHEADER_ext']='.di' + v.DFLAGS_d_with_header='-fintfc' + v['D_HDR_F']='-fintfc-file=%s' +def configure(conf): + conf.find_gdc() + conf.load('ar') + conf.load('d') + conf.common_flags_gdc() + conf.d_platform_flags() diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gfortran.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gfortran.py new file mode 100644 index 0000000..854a93d --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gfortran.py @@ -0,0 +1,69 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import re +from waflib import Utils +from waflib.Tools import fc,fc_config,fc_scan,ar +from waflib.Configure import conf +@conf +def find_gfortran(conf): + fc=conf.find_program(['gfortran','g77'],var='FC') + fc=conf.cmd_to_list(fc) + conf.get_gfortran_version(fc) + conf.env.FC_NAME='GFORTRAN' +@conf +def gfortran_flags(conf): + v=conf.env + v['FCFLAGS_fcshlib']=['-fPIC'] + v['FORTRANMODFLAG']=['-J',''] + v['FCFLAGS_DEBUG']=['-Werror'] +@conf +def gfortran_modifier_win32(conf): + fc_config.fortran_modifier_win32(conf) +@conf +def gfortran_modifier_cygwin(conf): + fc_config.fortran_modifier_cygwin(conf) +@conf +def gfortran_modifier_darwin(conf): + fc_config.fortran_modifier_darwin(conf) +@conf +def gfortran_modifier_platform(conf): + dest_os=conf.env['DEST_OS']or Utils.unversioned_sys_platform() + gfortran_modifier_func=getattr(conf,'gfortran_modifier_'+dest_os,None) + if gfortran_modifier_func: + gfortran_modifier_func() +@conf +def get_gfortran_version(conf,fc): + version_re=re.compile(r"GNU\s*Fortran",re.I).search + cmd=fc+['--version'] + out,err=fc_config.getoutput(conf,cmd,stdin=False) + if out:match=version_re(out) + else:match=version_re(err) + if not match: + conf.fatal('Could not determine the compiler type') + cmd=fc+['-dM','-E','-'] + out,err=fc_config.getoutput(conf,cmd,stdin=True) + if out.find('__GNUC__')<0: + conf.fatal('Could not determine the compiler type') + k={} + out=out.split('\n') + import shlex + for line in out: + lst=shlex.split(line) + if len(lst)>2: + key=lst[1] + val=lst[2] + k[key]=val + def isD(var): + return var in k + def isT(var): + return var in k and k[var]!='0' + conf.env['FC_VERSION']=(k['__GNUC__'],k['__GNUC_MINOR__'],k['__GNUC_PATCHLEVEL__']) +def configure(conf): + conf.find_gfortran() + conf.find_ar() + conf.fc_flags() + conf.fc_add_flags() + conf.gfortran_flags() + conf.gfortran_modifier_platform() diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/glib2.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/glib2.py new file mode 100644 index 0000000..1d75510 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/glib2.py @@ -0,0 +1,173 @@ 
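get_gfortran_version() above confirms it is really talking to a GNU compiler by running it with -dM -E and reading the predefined macros off the preprocessor dump. A standalone sketch of that parsing step (the dump text is a made-up sample):

import shlex

def parse_gnu_macros(dump):
    # Illustrative sketch, not waf code: map '#define KEY VALUE' lines to a dict
    # and pull the GCC version triple out, as the code above does with shlex.
    macros = {}
    for line in dump.splitlines():
        parts = shlex.split(line)
        if len(parts) > 2 and parts[0] == '#define':
            macros[parts[1]] = parts[2]
    return (macros['__GNUC__'], macros['__GNUC_MINOR__'], macros['__GNUC_PATCHLEVEL__'])

dump = '#define __GNUC__ 4\n#define __GNUC_MINOR__ 9\n#define __GNUC_PATCHLEVEL__ 2\n'
print(parse_gnu_macros(dump))   # ('4', '9', '2')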
+#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os +from waflib import Task,Utils,Options,Errors,Logs +from waflib.TaskGen import taskgen_method,before_method,after_method,feature +@taskgen_method +def add_marshal_file(self,filename,prefix): + if not hasattr(self,'marshal_list'): + self.marshal_list=[] + self.meths.append('process_marshal') + self.marshal_list.append((filename,prefix)) +@before_method('process_source') +def process_marshal(self): + for f,prefix in getattr(self,'marshal_list',[]): + node=self.path.find_resource(f) + if not node: + raise Errors.WafError('file not found %r'%f) + h_node=node.change_ext('.h') + c_node=node.change_ext('.c') + task=self.create_task('glib_genmarshal',node,[h_node,c_node]) + task.env.GLIB_GENMARSHAL_PREFIX=prefix + self.source=self.to_nodes(getattr(self,'source',[])) + self.source.append(c_node) +class glib_genmarshal(Task.Task): + def run(self): + bld=self.inputs[0].__class__.ctx + get=self.env.get_flat + cmd1="%s %s --prefix=%s --header > %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[0].abspath()) + ret=bld.exec_command(cmd1) + if ret:return ret + c='''#include "%s"\n'''%self.outputs[0].name + self.outputs[1].write(c) + cmd2="%s %s --prefix=%s --body >> %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[1].abspath()) + return bld.exec_command(cmd2) + vars=['GLIB_GENMARSHAL_PREFIX','GLIB_GENMARSHAL'] + color='BLUE' + ext_out=['.h'] +@taskgen_method +def add_enums_from_template(self,source='',target='',template='',comments=''): + if not hasattr(self,'enums_list'): + self.enums_list=[] + self.meths.append('process_enums') + self.enums_list.append({'source':source,'target':target,'template':template,'file-head':'','file-prod':'','file-tail':'','enum-prod':'','value-head':'','value-prod':'','value-tail':'','comments':comments}) +@taskgen_method +def add_enums(self,source='',target='',file_head='',file_prod='',file_tail='',enum_prod='',value_head='',value_prod='',value_tail='',comments=''): + if not hasattr(self,'enums_list'): + self.enums_list=[] + self.meths.append('process_enums') + self.enums_list.append({'source':source,'template':'','target':target,'file-head':file_head,'file-prod':file_prod,'file-tail':file_tail,'enum-prod':enum_prod,'value-head':value_head,'value-prod':value_prod,'value-tail':value_tail,'comments':comments}) +@before_method('process_source') +def process_enums(self): + for enum in getattr(self,'enums_list',[]): + task=self.create_task('glib_mkenums') + env=task.env + inputs=[] + source_list=self.to_list(enum['source']) + if not source_list: + raise Errors.WafError('missing source '+str(enum)) + source_list=[self.path.find_resource(k)for k in source_list] + inputs+=source_list + env['GLIB_MKENUMS_SOURCE']=[k.abspath()for k in source_list] + if not enum['target']: + raise Errors.WafError('missing target '+str(enum)) + tgt_node=self.path.find_or_declare(enum['target']) + if tgt_node.name.endswith('.c'): + self.source.append(tgt_node) + env['GLIB_MKENUMS_TARGET']=tgt_node.abspath() + options=[] + if enum['template']: + template_node=self.path.find_resource(enum['template']) + options.append('--template %s'%(template_node.abspath())) + inputs.append(template_node) + 
params={'file-head':'--fhead','file-prod':'--fprod','file-tail':'--ftail','enum-prod':'--eprod','value-head':'--vhead','value-prod':'--vprod','value-tail':'--vtail','comments':'--comments'} + for param,option in params.items(): + if enum[param]: + options.append('%s %r'%(option,enum[param])) + env['GLIB_MKENUMS_OPTIONS']=' '.join(options) + task.set_inputs(inputs) + task.set_outputs(tgt_node) +class glib_mkenums(Task.Task): + run_str='${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}' + color='PINK' + ext_out=['.h'] +@taskgen_method +def add_settings_schemas(self,filename_list): + if not hasattr(self,'settings_schema_files'): + self.settings_schema_files=[] + if not isinstance(filename_list,list): + filename_list=[filename_list] + self.settings_schema_files.extend(filename_list) +@taskgen_method +def add_settings_enums(self,namespace,filename_list): + if hasattr(self,'settings_enum_namespace'): + raise Errors.WafError("Tried to add gsettings enums to '%s' more than once"%self.name) + self.settings_enum_namespace=namespace + if type(filename_list)!='list': + filename_list=[filename_list] + self.settings_enum_files=filename_list +def r_change_ext(self,ext): + name=self.name + k=name.rfind('.') + if k>=0: + name=name[:k]+ext + else: + name=name+ext + return self.parent.find_or_declare([name]) +@feature('glib2') +def process_settings(self): + enums_tgt_node=[] + install_files=[] + settings_schema_files=getattr(self,'settings_schema_files',[]) + if settings_schema_files and not self.env['GLIB_COMPILE_SCHEMAS']: + raise Errors.WafError("Unable to process GSettings schemas - glib-compile-schemas was not found during configure") + if hasattr(self,'settings_enum_files'): + enums_task=self.create_task('glib_mkenums') + source_list=self.settings_enum_files + source_list=[self.path.find_resource(k)for k in source_list] + enums_task.set_inputs(source_list) + enums_task.env['GLIB_MKENUMS_SOURCE']=[k.abspath()for k in source_list] + target=self.settings_enum_namespace+'.enums.xml' + tgt_node=self.path.find_or_declare(target) + enums_task.set_outputs(tgt_node) + enums_task.env['GLIB_MKENUMS_TARGET']=tgt_node.abspath() + enums_tgt_node=[tgt_node] + install_files.append(tgt_node) + options='--comments "<!-- @comment@ -->" --fhead "<schemalist>" --vhead " <@type@ id=\\"%s.@EnumName@\\">" --vprod " <value nick=\\"@valuenick@\\" value=\\"@valuenum@\\"/>" --vtail " </@type@>" --ftail "</schemalist>" '%(self.settings_enum_namespace) + enums_task.env['GLIB_MKENUMS_OPTIONS']=options + for schema in settings_schema_files: + schema_task=self.create_task('glib_validate_schema') + schema_node=self.path.find_resource(schema) + if not schema_node: + raise Errors.WafError("Cannot find the schema file '%s'"%schema) + install_files.append(schema_node) + source_list=enums_tgt_node+[schema_node] + schema_task.set_inputs(source_list) + schema_task.env['GLIB_COMPILE_SCHEMAS_OPTIONS']=[("--schema-file="+k.abspath())for k in source_list] + target_node=r_change_ext(schema_node,'.xml.valid') + schema_task.set_outputs(target_node) + schema_task.env['GLIB_VALIDATE_SCHEMA_OUTPUT']=target_node.abspath() + def compile_schemas_callback(bld): + if not bld.is_install:return + Logs.pprint('YELLOW','Updating GSettings schema cache') + command=Utils.subst_vars("${GLIB_COMPILE_SCHEMAS} ${GSETTINGSSCHEMADIR}",bld.env) + ret=self.bld.exec_command(command) + if self.bld.is_install: + if not self.env['GSETTINGSSCHEMADIR']: + raise Errors.WafError('GSETTINGSSCHEMADIR not defined (should have been set up 
automatically during configure)') + if install_files: + self.bld.install_files(self.env['GSETTINGSSCHEMADIR'],install_files) + if not hasattr(self.bld,'_compile_schemas_registered'): + self.bld.add_post_fun(compile_schemas_callback) + self.bld._compile_schemas_registered=True +class glib_validate_schema(Task.Task): + run_str='rm -f ${GLIB_VALIDATE_SCHEMA_OUTPUT} && ${GLIB_COMPILE_SCHEMAS} --dry-run ${GLIB_COMPILE_SCHEMAS_OPTIONS} && touch ${GLIB_VALIDATE_SCHEMA_OUTPUT}' + color='PINK' +def configure(conf): + conf.find_program('glib-genmarshal',var='GLIB_GENMARSHAL') + conf.find_perl_program('glib-mkenums',var='GLIB_MKENUMS') + conf.find_program('glib-compile-schemas',var='GLIB_COMPILE_SCHEMAS',mandatory=False) + def getstr(varname): + return getattr(Options.options,varname,getattr(conf.env,varname,'')) + gsettingsschemadir=getstr('GSETTINGSSCHEMADIR') + if not gsettingsschemadir: + datadir=getstr('DATADIR') + if not datadir: + prefix=conf.env['PREFIX'] + datadir=os.path.join(prefix,'share') + gsettingsschemadir=os.path.join(datadir,'glib-2.0','schemas') + conf.env['GSETTINGSSCHEMADIR']=gsettingsschemadir +def options(opt): + opt.add_option('--gsettingsschemadir',help='GSettings schema location [Default: ${datadir}/glib-2.0/schemas]',default='',dest='GSETTINGSSCHEMADIR') diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gnu_dirs.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gnu_dirs.py new file mode 100644 index 0000000..9c8a304 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gnu_dirs.py @@ -0,0 +1,65 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os +from waflib import Utils,Options,Context +_options=[x.split(', ')for x in''' +bindir, user executables, ${EXEC_PREFIX}/bin +sbindir, system admin executables, ${EXEC_PREFIX}/sbin +libexecdir, program executables, ${EXEC_PREFIX}/libexec +sysconfdir, read-only single-machine data, ${PREFIX}/etc +sharedstatedir, modifiable architecture-independent data, ${PREFIX}/com +localstatedir, modifiable single-machine data, ${PREFIX}/var +libdir, object code libraries, ${EXEC_PREFIX}/lib +includedir, C header files, ${PREFIX}/include +oldincludedir, C header files for non-gcc, /usr/include +datarootdir, read-only arch.-independent data root, ${PREFIX}/share +datadir, read-only architecture-independent data, ${DATAROOTDIR} +infodir, info documentation, ${DATAROOTDIR}/info +localedir, locale-dependent data, ${DATAROOTDIR}/locale +mandir, man documentation, ${DATAROOTDIR}/man +docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE} +htmldir, html documentation, ${DOCDIR} +dvidir, dvi documentation, ${DOCDIR} +pdfdir, pdf documentation, ${DOCDIR} +psdir, ps documentation, ${DOCDIR} +'''.split('\n')if x] +def configure(conf): + def get_param(varname,default): + return getattr(Options.options,varname,'')or default + env=conf.env + env.LIBDIR=env.BINDIR=[] + env.EXEC_PREFIX=get_param('EXEC_PREFIX',env.PREFIX) + env.PACKAGE=getattr(Context.g_module,'APPNAME',None)or env.PACKAGE + complete=False + iter=0 + while not complete and iter<len(_options)+1: + iter+=1 + complete=True + for name,help,default in _options: + name=name.upper() + if not env[name]: + try: + env[name]=Utils.subst_vars(get_param(name,default).replace('/',os.sep),env) + except TypeError: + complete=False + if not complete: + lst=[name for name,_,_ in _options if not env[name.upper()]] + raise 
conf.errors.WafError('Variable substitution failure %r'%lst) +def options(opt): + inst_dir=opt.add_option_group('Installation directories','By default, "waf install" will put the files in\ + "/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\ + than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"') + for k in('--prefix','--destdir'): + option=opt.parser.get_option(k) + if option: + opt.parser.remove_option(k) + inst_dir.add_option(option) + inst_dir.add_option('--exec-prefix',help='installation prefix [Default: ${PREFIX}]',default='',dest='EXEC_PREFIX') + dirs_options=opt.add_option_group('Pre-defined installation directories','') + for name,help,default in _options: + option_name='--'+name + str_default=default + str_help='%s [Default: %s]'%(help,str_default) + dirs_options.add_option(option_name,help=str_help,default='',dest=name.upper()) diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gxx.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gxx.py new file mode 100644 index 0000000..475f7fd --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gxx.py @@ -0,0 +1,98 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_gxx(conf): + cxx=conf.find_program(['g++','c++'],var='CXX') + cxx=conf.cmd_to_list(cxx) + conf.get_cc_version(cxx,gcc=True) + conf.env.CXX_NAME='gcc' + conf.env.CXX=cxx +@conf +def gxx_common_flags(conf): + v=conf.env + v['CXX_SRC_F']=[] + v['CXX_TGT_F']=['-c','-o'] + if not v['LINK_CXX']:v['LINK_CXX']=v['CXX'] + v['CXXLNK_SRC_F']=[] + v['CXXLNK_TGT_F']=['-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['RPATH_ST']='-Wl,-rpath,%s' + v['SONAME_ST']='-Wl,-h,%s' + v['SHLIB_MARKER']='-Wl,-Bdynamic' + v['STLIB_MARKER']='-Wl,-Bstatic' + v['cxxprogram_PATTERN']='%s' + v['CXXFLAGS_cxxshlib']=['-fPIC'] + v['LINKFLAGS_cxxshlib']=['-shared'] + v['cxxshlib_PATTERN']='lib%s.so' + v['LINKFLAGS_cxxstlib']=['-Wl,-Bstatic'] + v['cxxstlib_PATTERN']='lib%s.a' + v['LINKFLAGS_MACBUNDLE']=['-bundle','-undefined','dynamic_lookup'] + v['CXXFLAGS_MACBUNDLE']=['-fPIC'] + v['macbundle_PATTERN']='%s.bundle' +@conf +def gxx_modifier_win32(conf): + v=conf.env + v['cxxprogram_PATTERN']='%s.exe' + v['cxxshlib_PATTERN']='%s.dll' + v['implib_PATTERN']='lib%s.dll.a' + v['IMPLIB_ST']='-Wl,--out-implib,%s' + v['CXXFLAGS_cxxshlib']=[] + v.append_value('LINKFLAGS',['-Wl,--enable-auto-import']) +@conf +def gxx_modifier_cygwin(conf): + gxx_modifier_win32(conf) + v=conf.env + v['cxxshlib_PATTERN']='cyg%s.dll' + v.append_value('LINKFLAGS_cxxshlib',['-Wl,--enable-auto-image-base']) + v['CXXFLAGS_cxxshlib']=[] +@conf +def gxx_modifier_darwin(conf): + v=conf.env + v['CXXFLAGS_cxxshlib']=['-fPIC'] + v['LINKFLAGS_cxxshlib']=['-dynamiclib','-Wl,-compatibility_version,1','-Wl,-current_version,1'] + v['cxxshlib_PATTERN']='lib%s.dylib' + v['FRAMEWORKPATH_ST']='-F%s' + v['FRAMEWORK_ST']=['-framework'] + v['ARCH_ST']=['-arch'] + v['LINKFLAGS_cxxstlib']=[] + v['SHLIB_MARKER']=[] + v['STLIB_MARKER']=[] + v['SONAME_ST']=[] +@conf +def gxx_modifier_aix(conf): + v=conf.env + v['LINKFLAGS_cxxprogram']=['-Wl,-brtl'] + v['LINKFLAGS_cxxshlib']=['-shared','-Wl,-brtl,-bexpfull'] + v['SHLIB_MARKER']=[] +@conf +def gxx_modifier_hpux(conf): + v=conf.env + 
v['SHLIB_MARKER']=[] + v['STLIB_MARKER']='-Bstatic' + v['CFLAGS_cxxshlib']=['-fPIC','-DPIC'] + v['cxxshlib_PATTERN']='lib%s.sl' +@conf +def gxx_modifier_openbsd(conf): + conf.env.SONAME_ST=[] +@conf +def gxx_modifier_platform(conf): + gxx_modifier_func=getattr(conf,'gxx_modifier_'+conf.env.DEST_OS,None) + if gxx_modifier_func: + gxx_modifier_func() +def configure(conf): + conf.find_gxx() + conf.find_ar() + conf.gxx_common_flags() + conf.gxx_modifier_platform() + conf.cxx_load_tools() + conf.cxx_add_flags() + conf.link_add_flags() diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gxx.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gxx.pyc Binary files differnew file mode 100644 index 0000000..6e3c454 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gxx.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/icc.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/icc.py new file mode 100644 index 0000000..7c75e18 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/icc.py @@ -0,0 +1,30 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys +from waflib.Tools import ccroot,ar,gcc +from waflib.Configure import conf +@conf +def find_icc(conf): + if sys.platform=='cygwin': + conf.fatal('The Intel compiler does not work on Cygwin') + v=conf.env + cc=None + if v['CC']:cc=v['CC'] + elif'CC'in conf.environ:cc=conf.environ['CC'] + if not cc:cc=conf.find_program('icc',var='CC') + if not cc:cc=conf.find_program('ICL',var='CC') + if not cc:conf.fatal('Intel C Compiler (icc) was not found') + cc=conf.cmd_to_list(cc) + conf.get_cc_version(cc,icc=True) + v['CC']=cc + v['CC_NAME']='icc' +def configure(conf): + conf.find_icc() + conf.find_ar() + conf.gcc_common_flags() + conf.gcc_modifier_platform() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/icc.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/icc.pyc Binary files differnew file mode 100644 index 0000000..9718f28 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/icc.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/icpc.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/icpc.py new file mode 100644 index 0000000..14a5325 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/icpc.py @@ -0,0 +1,29 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
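find_icc() above (like find_irixcc() a little further down) resolves the compiler in a fixed order: an already-configured CC, then the CC environment variable, then a PATH search, failing loudly only at the end. A rough standalone equivalent, with shutil.which standing in for conf.find_program (Python 3 stdlib; names illustrative):

import os
import shutil

def find_c_compiler(configured=None, candidates=('icc', 'cc')):
    # Illustrative sketch, not waf code.
    if configured:                     # 1. an explicit setting wins
        return configured
    if os.environ.get('CC'):           # 2. then the environment
        return os.environ['CC']
    for name in candidates:            # 3. then a PATH search
        path = shutil.which(name)
        if path:
            return path
    raise RuntimeError('no C compiler found')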
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys +from waflib.Tools import ccroot,ar,gxx +from waflib.Configure import conf +@conf +def find_icpc(conf): + if sys.platform=='cygwin': + conf.fatal('The Intel compiler does not work on Cygwin') + v=conf.env + cxx=None + if v['CXX']:cxx=v['CXX'] + elif'CXX'in conf.environ:cxx=conf.environ['CXX'] + if not cxx:cxx=conf.find_program('icpc',var='CXX') + if not cxx:conf.fatal('Intel C++ Compiler (icpc) was not found') + cxx=conf.cmd_to_list(cxx) + conf.get_cc_version(cxx,icc=True) + v['CXX']=cxx + v['CXX_NAME']='icc' +def configure(conf): + conf.find_icpc() + conf.find_ar() + conf.gxx_common_flags() + conf.gxx_modifier_platform() + conf.cxx_load_tools() + conf.cxx_add_flags() + conf.link_add_flags() diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/icpc.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/icpc.pyc Binary files differnew file mode 100644 index 0000000..60a5599 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/icpc.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ifort.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ifort.py new file mode 100644 index 0000000..a9f2528 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ifort.py @@ -0,0 +1,49 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import re +from waflib import Utils +from waflib.Tools import fc,fc_config,fc_scan,ar +from waflib.Configure import conf +@conf +def find_ifort(conf): + fc=conf.find_program('ifort',var='FC') + fc=conf.cmd_to_list(fc) + conf.get_ifort_version(fc) + conf.env.FC_NAME='IFORT' +@conf +def ifort_modifier_cygwin(conf): + raise NotImplementedError("Ifort on cygwin not yet implemented") +@conf +def ifort_modifier_win32(conf): + fc_config.fortran_modifier_win32(conf) +@conf +def ifort_modifier_darwin(conf): + fc_config.fortran_modifier_darwin(conf) +@conf +def ifort_modifier_platform(conf): + dest_os=conf.env['DEST_OS']or Utils.unversioned_sys_platform() + ifort_modifier_func=getattr(conf,'ifort_modifier_'+dest_os,None) + if ifort_modifier_func: + ifort_modifier_func() +@conf +def get_ifort_version(conf,fc): + version_re=re.compile(r"ifort\s*\(IFORT\)\s*(?P<major>\d*)\.(?P<minor>\d*)",re.I).search + cmd=fc+['--version'] + out,err=fc_config.getoutput(conf,cmd,stdin=False) + if out: + match=version_re(out) + else: + match=version_re(err) + if not match: + conf.fatal('cannot determine ifort version.') + k=match.groupdict() + conf.env['FC_VERSION']=(k['major'],k['minor']) +def configure(conf): + conf.find_ifort() + conf.find_program('xiar',var='AR') + conf.env.ARFLAGS='rcs' + conf.fc_flags() + conf.fc_add_flags() + conf.ifort_modifier_platform() diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/intltool.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/intltool.py new file mode 100644 index 0000000..d558674 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/intltool.py @@ -0,0 +1,77 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
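get_ifort_version() above uses the same --version probe but with named regex groups, which keeps the major/minor extraction readable. Applied to a sample banner (the banner string is invented for illustration):

import re

IFORT_RE = re.compile(r'ifort\s*\(IFORT\)\s*(?P<major>\d+)\.(?P<minor>\d+)', re.I)

banner = 'ifort (IFORT) 14.0 20130728'
m = IFORT_RE.search(banner)
print((m.group('major'), m.group('minor')))   # ('14', '0')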
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,re +from waflib import Configure,TaskGen,Task,Utils,Runner,Options,Build,Logs +import waflib.Tools.ccroot +from waflib.TaskGen import feature,before_method +from waflib.Logs import error +@before_method('process_source') +@feature('intltool_in') +def apply_intltool_in_f(self): + try:self.meths.remove('process_source') + except ValueError:pass + if not self.env.LOCALEDIR: + self.env.LOCALEDIR=self.env.PREFIX+'/share/locale' + for i in self.to_list(self.source): + node=self.path.find_resource(i) + podir=getattr(self,'podir','po') + podirnode=self.path.find_dir(podir) + if not podirnode: + error("could not find the podir %r"%podir) + continue + cache=getattr(self,'intlcache','.intlcache') + self.env['INTLCACHE']=os.path.join(self.path.bldpath(),podir,cache) + self.env['INTLPODIR']=podirnode.bldpath() + self.env['INTLFLAGS']=getattr(self,'flags',['-q','-u','-c']) + task=self.create_task('intltool',node,node.change_ext('')) + inst=getattr(self,'install_path','${LOCALEDIR}') + if inst: + self.bld.install_files(inst,task.outputs) +@feature('intltool_po') +def apply_intltool_po(self): + try:self.meths.remove('process_source') + except ValueError:pass + if not self.env.LOCALEDIR: + self.env.LOCALEDIR=self.env.PREFIX+'/share/locale' + appname=getattr(self,'appname','set_your_app_name') + podir=getattr(self,'podir','') + inst=getattr(self,'install_path','${LOCALEDIR}') + linguas=self.path.find_node(os.path.join(podir,'LINGUAS')) + if linguas: + file=open(linguas.abspath()) + langs=[] + for line in file.readlines(): + if not line.startswith('#'): + langs+=line.split() + file.close() + re_linguas=re.compile('[-a-zA-Z_@.]+') + for lang in langs: + if re_linguas.match(lang): + node=self.path.find_resource(os.path.join(podir,re_linguas.match(lang).group()+'.po')) + task=self.create_task('po',node,node.change_ext('.mo')) + if inst: + filename=task.outputs[0].name + (langname,ext)=os.path.splitext(filename) + inst_file=inst+os.sep+langname+os.sep+'LC_MESSAGES'+os.sep+appname+'.mo' + self.bld.install_as(inst_file,task.outputs[0],chmod=getattr(self,'chmod',Utils.O644),env=task.env) + else: + Logs.pprint('RED',"Error no LINGUAS file found in po directory") +class po(Task.Task): + run_str='${MSGFMT} -o ${TGT} ${SRC}' + color='BLUE' +class intltool(Task.Task): + run_str='${INTLTOOL} ${INTLFLAGS} ${INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}' + color='BLUE' +def configure(conf): + conf.find_program('msgfmt',var='MSGFMT') + conf.find_perl_program('intltool-merge',var='INTLTOOL') + prefix=conf.env.PREFIX + datadir=conf.env.DATADIR + if not datadir: + datadir=os.path.join(prefix,'share') + conf.define('LOCALEDIR',os.path.join(datadir,'locale').replace('\\','\\\\')) + conf.define('DATADIR',datadir.replace('\\','\\\\')) + if conf.env.CC or conf.env.CXX: + conf.check(header_name='locale.h') diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/irixcc.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/irixcc.py new file mode 100644 index 0000000..8dbdfca --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/irixcc.py @@ -0,0 +1,48 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
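apply_intltool_po() above reads the po/LINGUAS file, skips comment lines, and turns every listed language code into a <lang>.po to <lang>.mo compile installed under LC_MESSAGES. The file parsing on its own, as a small sketch using the same language-code regex:

import re

RE_LANG = re.compile(r'[-a-zA-Z_@.]+')

def languages_from_linguas(text):
    # Illustrative sketch, not waf code: comment lines out, the rest split
    # into codes, invalid entries dropped.
    langs = []
    for line in text.splitlines():
        if not line.startswith('#'):
            langs += line.split()
    return [lang for lang in langs if RE_LANG.match(lang)]

print(languages_from_linguas('# translated languages\nde fr\npt_BR\n'))
# ['de', 'fr', 'pt_BR']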
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os +from waflib import Utils +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_irixcc(conf): + v=conf.env + cc=None + if v['CC']:cc=v['CC'] + elif'CC'in conf.environ:cc=conf.environ['CC'] + if not cc:cc=conf.find_program('cc',var='CC') + if not cc:conf.fatal('irixcc was not found') + cc=conf.cmd_to_list(cc) + try: + conf.cmd_and_log(cc+['-version']) + except Exception: + conf.fatal('%r -version could not be executed'%cc) + v['CC']=cc + v['CC_NAME']='irix' +@conf +def irixcc_common_flags(conf): + v=conf.env + v['CC_SRC_F']='' + v['CC_TGT_F']=['-c','-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + if not v['LINK_CC']:v['LINK_CC']=v['CC'] + v['CCLNK_SRC_F']='' + v['CCLNK_TGT_F']=['-o'] + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['cprogram_PATTERN']='%s' + v['cshlib_PATTERN']='lib%s.so' + v['cstlib_PATTERN']='lib%s.a' +def configure(conf): + conf.find_irixcc() + conf.find_cpp() + conf.find_ar() + conf.irixcc_common_flags() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/javaw.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/javaw.py new file mode 100644 index 0000000..15f128c --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/javaw.py @@ -0,0 +1,309 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,re,tempfile,shutil +from waflib import TaskGen,Task,Utils,Options,Build,Errors,Node,Logs +from waflib.Configure import conf +from waflib.TaskGen import feature,before_method,after_method +from waflib.Tools import ccroot +ccroot.USELIB_VARS['javac']=set(['CLASSPATH','JAVACFLAGS']) +SOURCE_RE='**/*.java' +JAR_RE='**/*' +class_check_source=''' +public class Test { + public static void main(String[] argv) { + Class lib; + if (argv.length < 1) { + System.err.println("Missing argument"); + System.exit(77); + } + try { + lib = Class.forName(argv[0]); + } catch (ClassNotFoundException e) { + System.err.println("ClassNotFoundException"); + System.exit(1); + } + lib = null; + System.exit(0); + } +} +''' +@feature('javac') +@before_method('process_source') +def apply_java(self): + Utils.def_attrs(self,jarname='',classpath='',sourcepath='.',srcdir='.',jar_mf_attributes={},jar_mf_classpath=[]) + nodes_lst=[] + outdir=getattr(self,'outdir',None) + if outdir: + if not isinstance(outdir,Node.Node): + outdir=self.path.get_bld().make_node(self.outdir) + else: + outdir=self.path.get_bld() + outdir.mkdir() + self.outdir=outdir + self.env['OUTDIR']=outdir.abspath() + self.javac_task=tsk=self.create_task('javac') + tmp=[] + srcdir=getattr(self,'srcdir','') + if isinstance(srcdir,Node.Node): + srcdir=[srcdir] + for x in Utils.to_list(srcdir): + if isinstance(x,Node.Node): + y=x + else: + y=self.path.find_dir(x) + if not y: + self.bld.fatal('Could not find the folder %s from %s'%(x,self.path)) + tmp.append(y) + tsk.srcdir=tmp + if getattr(self,'compat',None): + tsk.env.append_value('JAVACFLAGS',['-source',self.compat]) + if hasattr(self,'sourcepath'): + fold=[isinstance(x,Node.Node)and x or self.path.find_dir(x)for x in self.to_list(self.sourcepath)] + names=os.pathsep.join([x.srcpath()for x in fold]) + else: + names=[x.srcpath()for x in tsk.srcdir] + if names: + 
tsk.env.append_value('JAVACFLAGS',['-sourcepath',names]) +@feature('javac') +@after_method('apply_java') +def use_javac_files(self): + lst=[] + self.uselib=self.to_list(getattr(self,'uselib',[])) + names=self.to_list(getattr(self,'use',[])) + get=self.bld.get_tgen_by_name + for x in names: + try: + y=get(x) + except Exception: + self.uselib.append(x) + else: + y.post() + lst.append(y.jar_task.outputs[0].abspath()) + self.javac_task.set_run_after(y.jar_task) + if lst: + self.env.append_value('CLASSPATH',lst) +@feature('javac') +@after_method('apply_java','propagate_uselib_vars','use_javac_files') +def set_classpath(self): + self.env.append_value('CLASSPATH',getattr(self,'classpath',[])) + for x in self.tasks: + x.env.CLASSPATH=os.pathsep.join(self.env.CLASSPATH)+os.pathsep +@feature('jar') +@after_method('apply_java','use_javac_files') +@before_method('process_source') +def jar_files(self): + destfile=getattr(self,'destfile','test.jar') + jaropts=getattr(self,'jaropts',[]) + manifest=getattr(self,'manifest',None) + basedir=getattr(self,'basedir',None) + if basedir: + if not isinstance(self.basedir,Node.Node): + basedir=self.path.get_bld().make_node(basedir) + else: + basedir=self.path.get_bld() + if not basedir: + self.bld.fatal('Could not find the basedir %r for %r'%(self.basedir,self)) + self.jar_task=tsk=self.create_task('jar_create') + if manifest: + jarcreate=getattr(self,'jarcreate','cfm') + node=self.path.find_node(manifest) + tsk.dep_nodes.append(node) + jaropts.insert(0,node.abspath()) + else: + jarcreate=getattr(self,'jarcreate','cf') + if not isinstance(destfile,Node.Node): + destfile=self.path.find_or_declare(destfile) + if not destfile: + self.bld.fatal('invalid destfile %r for %r'%(destfile,self)) + tsk.set_outputs(destfile) + tsk.basedir=basedir + jaropts.append('-C') + jaropts.append(basedir.bldpath()) + jaropts.append('.') + tsk.env['JAROPTS']=jaropts + tsk.env['JARCREATE']=jarcreate + if getattr(self,'javac_task',None): + tsk.set_run_after(self.javac_task) +@feature('jar') +@after_method('jar_files') +def use_jar_files(self): + lst=[] + self.uselib=self.to_list(getattr(self,'uselib',[])) + names=self.to_list(getattr(self,'use',[])) + get=self.bld.get_tgen_by_name + for x in names: + try: + y=get(x) + except Exception: + self.uselib.append(x) + else: + y.post() + self.jar_task.run_after.update(y.tasks) +class jar_create(Task.Task): + color='GREEN' + run_str='${JAR} ${JARCREATE} ${TGT} ${JAROPTS}' + def runnable_status(self): + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + if not self.inputs: + global JAR_RE + try: + self.inputs=[x for x in self.basedir.ant_glob(JAR_RE,remove=False)if id(x)!=id(self.outputs[0])] + except Exception: + raise Errors.WafError('Could not find the basedir %r for %r'%(self.basedir,self)) + return super(jar_create,self).runnable_status() +class javac(Task.Task): + color='BLUE' + nocache=True + vars=['CLASSPATH','JAVACFLAGS','JAVAC','OUTDIR'] + def runnable_status(self): + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + if not self.inputs: + global SOURCE_RE + self.inputs=[] + for x in self.srcdir: + self.inputs.extend(x.ant_glob(SOURCE_RE,remove=False)) + return super(javac,self).runnable_status() + def run(self): + env=self.env + gen=self.generator + bld=gen.bld + wd=bld.bldnode.abspath() + def to_list(xx): + if isinstance(xx,str):return[xx] + return xx + cmd=[] + cmd.extend(to_list(env['JAVAC'])) + cmd.extend(['-classpath']) + cmd.extend(to_list(env['CLASSPATH'])) + cmd.extend(['-d']) + 
cmd.extend(to_list(env['OUTDIR'])) + cmd.extend(to_list(env['JAVACFLAGS'])) + files=[a.path_from(bld.bldnode)for a in self.inputs] + tmp=None + try: + if len(str(files))+len(str(cmd))>8192: + (fd,tmp)=tempfile.mkstemp(dir=bld.bldnode.abspath()) + try: + os.write(fd,'\n'.join(files)) + finally: + if tmp: + os.close(fd) + if Logs.verbose: + Logs.debug('runner: %r'%(cmd+files)) + cmd.append('@'+tmp) + else: + cmd+=files + ret=self.exec_command(cmd,cwd=wd,env=env.env or None) + finally: + if tmp: + os.remove(tmp) + return ret + def post_run(self): + for n in self.generator.outdir.ant_glob('**/*.class'): + n.sig=Utils.h_file(n.abspath()) + self.generator.bld.task_sigs[self.uid()]=self.cache_sig +@feature('javadoc') +@after_method('process_rule') +def create_javadoc(self): + tsk=self.create_task('javadoc') + tsk.classpath=getattr(self,'classpath',[]) + self.javadoc_package=Utils.to_list(self.javadoc_package) + if not isinstance(self.javadoc_output,Node.Node): + self.javadoc_output=self.bld.path.find_or_declare(self.javadoc_output) +class javadoc(Task.Task): + color='BLUE' + def __str__(self): + return'%s: %s -> %s\n'%(self.__class__.__name__,self.generator.srcdir,self.generator.javadoc_output) + def run(self): + env=self.env + bld=self.generator.bld + wd=bld.bldnode.abspath() + srcpath=self.generator.path.abspath()+os.sep+self.generator.srcdir + srcpath+=os.pathsep + srcpath+=self.generator.path.get_bld().abspath()+os.sep+self.generator.srcdir + classpath=env.CLASSPATH + classpath+=os.pathsep + classpath+=os.pathsep.join(self.classpath) + classpath="".join(classpath) + self.last_cmd=lst=[] + lst.extend(Utils.to_list(env['JAVADOC'])) + lst.extend(['-d',self.generator.javadoc_output.abspath()]) + lst.extend(['-sourcepath',srcpath]) + lst.extend(['-classpath',classpath]) + lst.extend(['-subpackages']) + lst.extend(self.generator.javadoc_package) + lst=[x for x in lst if x] + self.generator.bld.cmd_and_log(lst,cwd=wd,env=env.env or None,quiet=0) + def post_run(self): + nodes=self.generator.javadoc_output.ant_glob('**') + for x in nodes: + x.sig=Utils.h_file(x.abspath()) + self.generator.bld.task_sigs[self.uid()]=self.cache_sig +def configure(self): + java_path=self.environ['PATH'].split(os.pathsep) + v=self.env + if'JAVA_HOME'in self.environ: + java_path=[os.path.join(self.environ['JAVA_HOME'],'bin')]+java_path + self.env['JAVA_HOME']=[self.environ['JAVA_HOME']] + for x in'javac java jar javadoc'.split(): + self.find_program(x,var=x.upper(),path_list=java_path) + self.env[x.upper()]=self.cmd_to_list(self.env[x.upper()]) + if'CLASSPATH'in self.environ: + v['CLASSPATH']=self.environ['CLASSPATH'] + if not v['JAR']:self.fatal('jar is required for making java packages') + if not v['JAVAC']:self.fatal('javac is required for compiling java classes') + v['JARCREATE']='cf' + v['JAVACFLAGS']=[] +@conf +def check_java_class(self,classname,with_classpath=None): + javatestdir='.waf-javatest' + classpath=javatestdir + if self.env['CLASSPATH']: + classpath+=os.pathsep+self.env['CLASSPATH'] + if isinstance(with_classpath,str): + classpath+=os.pathsep+with_classpath + shutil.rmtree(javatestdir,True) + os.mkdir(javatestdir) + Utils.writef(os.path.join(javatestdir,'Test.java'),class_check_source) + self.exec_command(self.env['JAVAC']+[os.path.join(javatestdir,'Test.java')],shell=False) + cmd=self.env['JAVA']+['-cp',classpath,'Test',classname] + self.to_log("%s\n"%str(cmd)) + found=self.exec_command(cmd,shell=False) + self.msg('Checking for java class %s'%classname,not found) + shutil.rmtree(javatestdir,True) + return 
found +@conf +def check_jni_headers(conf): + if not conf.env.CC_NAME and not conf.env.CXX_NAME: + conf.fatal('load a compiler first (gcc, g++, ..)') + if not conf.env.JAVA_HOME: + conf.fatal('set JAVA_HOME in the system environment') + javaHome=conf.env['JAVA_HOME'][0] + dir=conf.root.find_dir(conf.env.JAVA_HOME[0]+'/include') + if dir is None: + dir=conf.root.find_dir(conf.env.JAVA_HOME[0]+'/../Headers') + if dir is None: + conf.fatal('JAVA_HOME does not seem to be set properly') + f=dir.ant_glob('**/(jni|jni_md).h') + incDirs=[x.parent.abspath()for x in f] + dir=conf.root.find_dir(conf.env.JAVA_HOME[0]) + f=dir.ant_glob('**/*jvm.(so|dll|dylib)') + libDirs=[x.parent.abspath()for x in f]or[javaHome] + f=dir.ant_glob('**/*jvm.(lib)') + if f: + libDirs=[[x,y.parent.abspath()]for x in libDirs for y in f] + for d in libDirs: + try: + conf.check(header_name='jni.h',define_name='HAVE_JNI_H',lib='jvm',libpath=d,includes=incDirs,uselib_store='JAVA',uselib='JAVA') + except Exception: + pass + else: + break + else: + conf.fatal('could not find lib jvm in %r (see config.log)'%libDirs) diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/kde4.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/kde4.py new file mode 100644 index 0000000..cd51f5f --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/kde4.py @@ -0,0 +1,48 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys,re +from waflib import Options,TaskGen,Task,Utils +from waflib.TaskGen import feature,after_method +@feature('msgfmt') +def apply_msgfmt(self): + for lang in self.to_list(self.langs): + node=self.path.find_resource(lang+'.po') + task=self.create_task('msgfmt',node,node.change_ext('.mo')) + langname=lang.split('/') + langname=langname[-1] + inst=getattr(self,'install_path','${KDE4_LOCALE_INSTALL_DIR}') + self.bld.install_as(inst+os.sep+langname+os.sep+'LC_MESSAGES'+os.sep+getattr(self,'appname','set_your_appname')+'.mo',task.outputs[0],chmod=getattr(self,'chmod',Utils.O644)) +class msgfmt(Task.Task): + color='BLUE' + run_str='${MSGFMT} ${SRC} -o ${TGT}' +def configure(self): + kdeconfig=self.find_program('kde4-config') + prefix=self.cmd_and_log('%s --prefix'%kdeconfig).strip() + fname='%s/share/apps/cmake/modules/KDELibsDependencies.cmake'%prefix + try:os.stat(fname) + except OSError: + fname='%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake'%prefix + try:os.stat(fname) + except OSError:self.fatal('could not open %s'%fname) + try: + txt=Utils.readf(fname) + except(OSError,IOError): + self.fatal('could not read %s'%fname) + txt=txt.replace('\\\n','\n') + fu=re.compile('#(.*)\n') + txt=fu.sub('',txt) + setregexp=re.compile('([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)') + found=setregexp.findall(txt) + for(_,key,val)in found: + self.env[key]=val + self.env['LIB_KDECORE']=['kdecore'] + self.env['LIB_KDEUI']=['kdeui'] + self.env['LIB_KIO']=['kio'] + self.env['LIB_KHTML']=['khtml'] + self.env['LIB_KPARTS']=['kparts'] + self.env['LIBPATH_KDECORE']=[os.path.join(self.env.KDE4_LIB_INSTALL_DIR,'kde4','devel'),self.env.KDE4_LIB_INSTALL_DIR] + self.env['INCLUDES_KDECORE']=[self.env['KDE4_INCLUDE_INSTALL_DIR']] + self.env.append_value('INCLUDES_KDECORE',[self.env['KDE4_INCLUDE_INSTALL_DIR']+os.sep+'KDE']) + self.find_program('msgfmt',var='MSGFMT') diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ldc2.py 
b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ldc2.py new file mode 100644 index 0000000..25b99e5 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ldc2.py @@ -0,0 +1,37 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import sys +from waflib.Tools import ar,d +from waflib.Configure import conf +@conf +def find_ldc2(conf): + conf.find_program(['ldc2'],var='D') + out=conf.cmd_and_log([conf.env.D,'-version']) + if out.find("based on DMD v2.")==-1: + conf.fatal("detected compiler is not ldc2") +@conf +def common_flags_ldc2(conf): + v=conf.env + v['D_SRC_F']=['-c'] + v['D_TGT_F']='-of%s' + v['D_LINKER']=v['D'] + v['DLNK_SRC_F']='' + v['DLNK_TGT_F']='-of%s' + v['DINC_ST']='-I%s' + v['DSHLIB_MARKER']=v['DSTLIB_MARKER']='' + v['DSTLIB_ST']=v['DSHLIB_ST']='-L-l%s' + v['DSTLIBPATH_ST']=v['DLIBPATH_ST']='-L-L%s' + v['LINKFLAGS_dshlib']=['-L-shared'] + v['DHEADER_ext']='.di' + v['DFLAGS_d_with_header']=['-H','-Hf'] + v['D_HDR_F']='%s' + v['LINKFLAGS']=[] + v['DFLAGS_dshlib']=['-relocation-model=pic'] +def configure(conf): + conf.find_ldc2() + conf.load('ar') + conf.load('d') + conf.common_flags_ldc2() + conf.d_platform_flags() diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/lua.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/lua.py new file mode 100644 index 0000000..a0a35fc --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/lua.py @@ -0,0 +1,18 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib.TaskGen import extension +from waflib import Task,Utils +@extension('.lua') +def add_lua(self,node): + tsk=self.create_task('luac',node,node.change_ext('.luac')) + inst_to=getattr(self,'install_path',self.env.LUADIR and'${LUADIR}'or None) + if inst_to: + self.bld.install_files(inst_to,tsk.outputs) + return tsk +class luac(Task.Task): + run_str='${LUAC} -s -o ${TGT} ${SRC}' + color='PINK' +def configure(conf): + conf.find_program('luac',var='LUAC') diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/msvc.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/msvc.py new file mode 100644 index 0000000..dd62e40 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/msvc.py @@ -0,0 +1,749 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
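The kde4 tool shown just before ldc2.py works out KDE's install layout by scraping set(KEY "value") lines out of KDELibsDependencies.cmake with a regex rather than invoking CMake. That scrape in isolation, using the same pattern as above (the cmake text below is a fabricated two-line sample):

import re

SET_RE = re.compile(r'([sS][eE][tT]\s*\()\s*(\S+)\s+"([^"]+)"\)')

cmake = 'set (KDE4_INCLUDE_INSTALL_DIR "/usr/include")\nset (KDE4_LIB_INSTALL_DIR "/usr/lib")\n'
# Illustrative sketch, not waf code: each match yields (set-token, key, value).
settings = {key: val for _, key, val in SET_RE.findall(cmake)}
print(settings['KDE4_LIB_INSTALL_DIR'])   # /usr/lib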
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys,re,tempfile +from waflib import Utils,Task,Logs,Options,Errors +from waflib.Logs import debug,warn +from waflib.TaskGen import after_method,feature +from waflib.Configure import conf +from waflib.Tools import ccroot,c,cxx,ar,winres +g_msvc_systemlibs=''' +aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet +cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs +credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d +ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp +faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glauxglu32 gpedit gpmuuid +gtrts32w gtrtst32hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop +kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi +mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree +msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm +netapi32 nmapinmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp +odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlgolepro32 opends60 opengl32 +osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu +ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm +rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32 +shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32 +traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg +version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm +wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp +'''.split() +all_msvc_platforms=[('x64','amd64'),('x86','x86'),('ia64','ia64'),('x86_amd64','amd64'),('x86_ia64','ia64'),('x86_arm','arm')] +all_wince_platforms=[('armv4','arm'),('armv4i','arm'),('mipsii','mips'),('mipsii_fp','mips'),('mipsiv','mips'),('mipsiv_fp','mips'),('sh4','sh'),('x86','cex86')] +all_icl_platforms=[('intel64','amd64'),('em64t','amd64'),('ia32','x86'),('Itanium','ia64')] +def options(opt): + opt.add_option('--msvc_version',type='string',help='msvc version, eg: "msvc 10.0,msvc 9.0"',default='') + opt.add_option('--msvc_targets',type='string',help='msvc targets, eg: "x64,arm"',default='') +def setup_msvc(conf,versions,arch=False): + platforms=getattr(Options.options,'msvc_targets','').split(',') + if platforms==['']: + platforms=Utils.to_list(conf.env['MSVC_TARGETS'])or[i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms] + desired_versions=getattr(Options.options,'msvc_version','').split(',') + if desired_versions==['']: + desired_versions=conf.env['MSVC_VERSIONS']or[v for v,_ in versions][::-1] + versiondict=dict(versions) + for version in desired_versions: + try: + targets=dict(versiondict[version]) + for target in platforms: + try: + arch,(p1,p2,p3)=targets[target] + compiler,revision=version.rsplit(' ',1) + if arch: + return compiler,revision,p1,p2,p3,arch + else: + return compiler,revision,p1,p2,p3 + except KeyError:continue + except KeyError:continue + conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)') +@conf +def get_msvc_version(conf,compiler,version,target,vcvars): + debug('msvc: get_msvc_version: %r %r %r',compiler,version,target) + 
batfile=conf.bldnode.make_node('waf-print-msvc.bat') + batfile.write("""@echo off +set INCLUDE= +set LIB= +call "%s" %s +echo PATH=%%PATH%% +echo INCLUDE=%%INCLUDE%% +echo LIB=%%LIB%%;%%LIBPATH%% +"""%(vcvars,target)) + sout=conf.cmd_and_log(['cmd','/E:on','/V:on','/C',batfile.abspath()]) + lines=sout.splitlines() + if not lines[0]: + lines.pop(0) + MSVC_PATH=MSVC_INCDIR=MSVC_LIBDIR=None + for line in lines: + if line.startswith('PATH='): + path=line[5:] + MSVC_PATH=path.split(';') + elif line.startswith('INCLUDE='): + MSVC_INCDIR=[i for i in line[8:].split(';')if i] + elif line.startswith('LIB='): + MSVC_LIBDIR=[i for i in line[4:].split(';')if i] + if None in(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR): + conf.fatal('msvc: Could not find a valid architecture for building (get_msvc_version_3)') + env=dict(os.environ) + env.update(PATH=path) + compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler) + cxx=conf.find_program(compiler_name,path_list=MSVC_PATH) + cxx=conf.cmd_to_list(cxx) + if'CL'in env: + del(env['CL']) + try: + try: + conf.cmd_and_log(cxx+['/help'],env=env) + except Exception ,e: + debug('msvc: get_msvc_version: %r %r %r -> failure'%(compiler,version,target)) + debug(str(e)) + conf.fatal('msvc: cannot run the compiler (in get_msvc_version)') + else: + debug('msvc: get_msvc_version: %r %r %r -> OK',compiler,version,target) + finally: + conf.env[compiler_name]='' + return(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR) +@conf +def gather_wsdk_versions(conf,versions): + version_pattern=re.compile('^v..?.?\...?.?') + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows') + except WindowsError: + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows') + except WindowsError: + return + index=0 + while 1: + try: + version=Utils.winreg.EnumKey(all_versions,index) + except WindowsError: + break + index=index+1 + if not version_pattern.match(version): + continue + try: + msvc_version=Utils.winreg.OpenKey(all_versions,version) + path,type=Utils.winreg.QueryValueEx(msvc_version,'InstallationFolder') + except WindowsError: + continue + if os.path.isfile(os.path.join(path,'bin','SetEnv.cmd')): + targets=[] + for target,arch in all_msvc_platforms: + try: + targets.append((target,(arch,conf.get_msvc_version('wsdk',version,'/'+target,os.path.join(path,'bin','SetEnv.cmd'))))) + except conf.errors.ConfigurationError: + pass + versions.append(('wsdk '+version[1:],targets)) +def gather_wince_supported_platforms(): + supported_wince_platforms=[] + try: + ce_sdk=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs') + except WindowsError: + try: + ce_sdk=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs') + except WindowsError: + ce_sdk='' + if not ce_sdk: + return supported_wince_platforms + ce_index=0 + while 1: + try: + sdk_device=Utils.winreg.EnumKey(ce_sdk,ce_index) + except WindowsError: + break + ce_index=ce_index+1 + sdk=Utils.winreg.OpenKey(ce_sdk,sdk_device) + try: + path,type=Utils.winreg.QueryValueEx(sdk,'SDKRootDir') + except WindowsError: + try: + path,type=Utils.winreg.QueryValueEx(sdk,'SDKInformation') + path,xml=os.path.split(path) + except WindowsError: + continue + path=str(path) + path,device=os.path.split(path) + if not device: + path,device=os.path.split(path) + for arch,compiler in all_wince_platforms: + platforms=[] + if 
os.path.isdir(os.path.join(path,device,'Lib',arch)): + platforms.append((arch,compiler,os.path.join(path,device,'Include',arch),os.path.join(path,device,'Lib',arch))) + if platforms: + supported_wince_platforms.append((device,platforms)) + return supported_wince_platforms +def gather_msvc_detected_versions(): + version_pattern=re.compile('^(\d\d?\.\d\d?)(Exp)?$') + detected_versions=[] + for vcver,vcvar in[('VCExpress','Exp'),('VisualStudio','')]: + try: + prefix='SOFTWARE\\Wow6432node\\Microsoft\\'+vcver + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,prefix) + except WindowsError: + try: + prefix='SOFTWARE\\Microsoft\\'+vcver + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,prefix) + except WindowsError: + continue + index=0 + while 1: + try: + version=Utils.winreg.EnumKey(all_versions,index) + except WindowsError: + break + index=index+1 + match=version_pattern.match(version) + if not match: + continue + else: + versionnumber=float(match.group(1)) + detected_versions.append((versionnumber,version+vcvar,prefix+"\\"+version)) + def fun(tup): + return tup[0] + detected_versions.sort(key=fun) + return detected_versions +@conf +def gather_msvc_targets(conf,versions,version,vc_path): + targets=[] + if os.path.isfile(os.path.join(vc_path,'vcvarsall.bat')): + for target,realtarget in all_msvc_platforms[::-1]: + try: + targets.append((target,(realtarget,conf.get_msvc_version('msvc',version,target,os.path.join(vc_path,'vcvarsall.bat'))))) + except conf.errors.ConfigurationError: + pass + elif os.path.isfile(os.path.join(vc_path,'Common7','Tools','vsvars32.bat')): + try: + targets.append(('x86',('x86',conf.get_msvc_version('msvc',version,'x86',os.path.join(vc_path,'Common7','Tools','vsvars32.bat'))))) + except conf.errors.ConfigurationError: + pass + elif os.path.isfile(os.path.join(vc_path,'Bin','vcvars32.bat')): + try: + targets.append(('x86',('x86',conf.get_msvc_version('msvc',version,'',os.path.join(vc_path,'Bin','vcvars32.bat'))))) + except conf.errors.ConfigurationError: + pass + if targets: + versions.append(('msvc '+version,targets)) +@conf +def gather_wince_targets(conf,versions,version,vc_path,vsvars,supported_platforms): + for device,platforms in supported_platforms: + cetargets=[] + for platform,compiler,include,lib in platforms: + winCEpath=os.path.join(vc_path,'ce') + if not os.path.isdir(winCEpath): + continue + try: + common_bindirs,_1,_2=conf.get_msvc_version('msvc',version,'x86',vsvars) + except conf.errors.ConfigurationError: + continue + if os.path.isdir(os.path.join(winCEpath,'lib',platform)): + bindirs=[os.path.join(winCEpath,'bin',compiler),os.path.join(winCEpath,'bin','x86_'+compiler)]+common_bindirs + incdirs=[os.path.join(winCEpath,'include'),os.path.join(winCEpath,'atlmfc','include'),include] + libdirs=[os.path.join(winCEpath,'lib',platform),os.path.join(winCEpath,'atlmfc','lib',platform),lib] + cetargets.append((platform,(platform,(bindirs,incdirs,libdirs)))) + if cetargets: + versions.append((device+' '+version,cetargets)) +@conf +def gather_winphone_targets(conf,versions,version,vc_path,vsvars): + targets=[] + for target,realtarget in all_msvc_platforms[::-1]: + try: + targets.append((target,(realtarget,conf.get_msvc_version('winphone',version,target,vsvars)))) + except conf.errors.ConfigurationError ,e: + pass + if targets: + versions.append(('winphone '+version,targets)) +@conf +def gather_msvc_versions(conf,versions): + vc_paths=[] + for(v,version,reg)in gather_msvc_detected_versions(): + try: + try: + 
msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\VC") + except WindowsError: + msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\Microsoft Visual C++") + path,type=Utils.winreg.QueryValueEx(msvc_version,'ProductDir') + vc_paths.append((version,os.path.abspath(str(path)))) + except WindowsError: + continue + wince_supported_platforms=gather_wince_supported_platforms() + for version,vc_path in vc_paths: + vs_path=os.path.dirname(vc_path) + vsvars=os.path.join(vs_path,'Common7','Tools','vsvars32.bat') + if wince_supported_platforms and os.path.isfile(vsvars): + conf.gather_wince_targets(versions,version,vc_path,vsvars,wince_supported_platforms) + vsvars=os.path.join(vs_path,'VC','WPSDK','WP80','vcvarsphoneall.bat') + if os.path.isfile(vsvars): + conf.gather_winphone_targets(versions,'8.0',vc_path,vsvars) + for version,vc_path in vc_paths: + vs_path=os.path.dirname(vc_path) + conf.gather_msvc_targets(versions,version,vc_path) +@conf +def gather_icl_versions(conf,versions): + version_pattern=re.compile('^...?.?\....?.?') + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++') + except WindowsError: + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Compilers\\C++') + except WindowsError: + return + index=0 + while 1: + try: + version=Utils.winreg.EnumKey(all_versions,index) + except WindowsError: + break + index=index+1 + if not version_pattern.match(version): + continue + targets=[] + for target,arch in all_icl_platforms: + try: + if target=='intel64':targetDir='EM64T_NATIVE' + else:targetDir=target + Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir) + icl_version=Utils.winreg.OpenKey(all_versions,version) + path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir') + batch_file=os.path.join(path,'bin','iclvars.bat') + if os.path.isfile(batch_file): + try: + targets.append((target,(arch,conf.get_msvc_version('intel',version,target,batch_file)))) + except conf.errors.ConfigurationError: + pass + except WindowsError: + pass + for target,arch in all_icl_platforms: + try: + icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+target) + path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir') + batch_file=os.path.join(path,'bin','iclvars.bat') + if os.path.isfile(batch_file): + try: + targets.append((target,(arch,conf.get_msvc_version('intel',version,target,batch_file)))) + except conf.errors.ConfigurationError: + pass + except WindowsError: + continue + major=version[0:2] + versions.append(('intel '+major,targets)) +@conf +def gather_intel_composer_versions(conf,versions): + version_pattern=re.compile('^...?.?\...?.?.?') + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Suites') + except WindowsError: + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Suites') + except WindowsError: + return + index=0 + while 1: + try: + version=Utils.winreg.EnumKey(all_versions,index) + except WindowsError: + break + index=index+1 + if not version_pattern.match(version): + continue + targets=[] + for target,arch in all_icl_platforms: + try: + if target=='intel64':targetDir='EM64T_NATIVE' + else:targetDir=target + try: + defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\'+targetDir) + except WindowsError: + if targetDir=='EM64T_NATIVE': + 
defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\EM64T') + else: + raise WindowsError + uid,type=Utils.winreg.QueryValueEx(defaults,'SubKey') + Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++\\'+targetDir) + icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++') + path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir') + batch_file=os.path.join(path,'bin','iclvars.bat') + if os.path.isfile(batch_file): + try: + targets.append((target,(arch,conf.get_msvc_version('intel',version,target,batch_file)))) + except conf.errors.ConfigurationError ,e: + pass + compilervars_warning_attr='_compilervars_warning_key' + if version[0:2]=='13'and getattr(conf,compilervars_warning_attr,True): + setattr(conf,compilervars_warning_attr,False) + patch_url='http://software.intel.com/en-us/forums/topic/328487' + compilervars_arch=os.path.join(path,'bin','compilervars_arch.bat') + for vscomntool in['VS110COMNTOOLS','VS100COMNTOOLS']: + if vscomntool in os.environ: + vs_express_path=os.environ[vscomntool]+r'..\IDE\VSWinExpress.exe' + dev_env_path=os.environ[vscomntool]+r'..\IDE\devenv.exe' + if(r'if exist "%VS110COMNTOOLS%..\IDE\VSWinExpress.exe"'in Utils.readf(compilervars_arch)and not os.path.exists(vs_express_path)and not os.path.exists(dev_env_path)): + Logs.warn(('The Intel compilervar_arch.bat only checks for one Visual Studio SKU ''(VSWinExpress.exe) but it does not seem to be installed at %r. ''The intel command line set up will fail to configure unless the file %r''is patched. See: %s')%(vs_express_path,compilervars_arch,patch_url)) + except WindowsError: + pass + major=version[0:2] + versions.append(('intel '+major,targets)) +@conf +def get_msvc_versions(conf): + if not conf.env['MSVC_INSTALLED_VERSIONS']: + lst=[] + conf.gather_icl_versions(lst) + conf.gather_intel_composer_versions(lst) + conf.gather_wsdk_versions(lst) + conf.gather_msvc_versions(lst) + conf.env['MSVC_INSTALLED_VERSIONS']=lst + return conf.env['MSVC_INSTALLED_VERSIONS'] +@conf +def print_all_msvc_detected(conf): + for version,targets in conf.env['MSVC_INSTALLED_VERSIONS']: + Logs.info(version) + for target,l in targets: + Logs.info("\t"+target) +@conf +def detect_msvc(conf,arch=False): + versions=get_msvc_versions(conf) + return setup_msvc(conf,versions,arch) +@conf +def find_lt_names_msvc(self,libname,is_static=False): + lt_names=['lib%s.la'%libname,'%s.la'%libname,] + for path in self.env['LIBPATH']: + for la in lt_names: + laf=os.path.join(path,la) + dll=None + if os.path.exists(laf): + ltdict=Utils.read_la_file(laf) + lt_libdir=None + if ltdict.get('libdir',''): + lt_libdir=ltdict['libdir'] + if not is_static and ltdict.get('library_names',''): + dllnames=ltdict['library_names'].split() + dll=dllnames[0].lower() + dll=re.sub('\.dll$','',dll) + return(lt_libdir,dll,False) + elif ltdict.get('old_library',''): + olib=ltdict['old_library'] + if os.path.exists(os.path.join(path,olib)): + return(path,olib,True) + elif lt_libdir!=''and os.path.exists(os.path.join(lt_libdir,olib)): + return(lt_libdir,olib,True) + else: + return(None,olib,True) + else: + raise self.errors.WafError('invalid libtool object file: %s'%laf) + return(None,None,None) +@conf +def libname_msvc(self,libname,is_static=False): + lib=libname.lower() + lib=re.sub('\.lib$','',lib) + if lib in g_msvc_systemlibs: + return lib + lib=re.sub('^lib','',lib) + if lib=='m': + return None + (lt_path,lt_libname,lt_static)=self.find_lt_names_msvc(lib,is_static) + if lt_path!=None and lt_libname!=None: + if lt_static==True: + return 
os.path.join(lt_path,lt_libname) + if lt_path!=None: + _libpaths=[lt_path]+self.env['LIBPATH'] + else: + _libpaths=self.env['LIBPATH'] + static_libs=['lib%ss.lib'%lib,'lib%s.lib'%lib,'%ss.lib'%lib,'%s.lib'%lib,] + dynamic_libs=['lib%s.dll.lib'%lib,'lib%s.dll.a'%lib,'%s.dll.lib'%lib,'%s.dll.a'%lib,'lib%s_d.lib'%lib,'%s_d.lib'%lib,'%s.lib'%lib,] + libnames=static_libs + if not is_static: + libnames=dynamic_libs+static_libs + for path in _libpaths: + for libn in libnames: + if os.path.exists(os.path.join(path,libn)): + debug('msvc: lib found: %s'%os.path.join(path,libn)) + return re.sub('\.lib$','',libn) + self.fatal("The library %r could not be found"%libname) + return re.sub('\.lib$','',libname) +@conf +def check_lib_msvc(self,libname,is_static=False,uselib_store=None): + libn=self.libname_msvc(libname,is_static) + if not uselib_store: + uselib_store=libname.upper() + if False and is_static: + self.env['STLIB_'+uselib_store]=[libn] + else: + self.env['LIB_'+uselib_store]=[libn] +@conf +def check_libs_msvc(self,libnames,is_static=False): + for libname in Utils.to_list(libnames): + self.check_lib_msvc(libname,is_static) +def configure(conf): + conf.autodetect(True) + conf.find_msvc() + conf.msvc_common_flags() + conf.cc_load_tools() + conf.cxx_load_tools() + conf.cc_add_flags() + conf.cxx_add_flags() + conf.link_add_flags() + conf.visual_studio_add_flags() +@conf +def no_autodetect(conf): + conf.env.NO_MSVC_DETECT=1 + configure(conf) +@conf +def autodetect(conf,arch=False): + v=conf.env + if v.NO_MSVC_DETECT: + return + if arch: + compiler,version,path,includes,libdirs,arch=conf.detect_msvc(True) + v['DEST_CPU']=arch + else: + compiler,version,path,includes,libdirs=conf.detect_msvc() + v['PATH']=path + v['INCLUDES']=includes + v['LIBPATH']=libdirs + v['MSVC_COMPILER']=compiler + try: + v['MSVC_VERSION']=float(version) + except Exception: + v['MSVC_VERSION']=float(version[:-3]) +def _get_prog_names(conf,compiler): + if compiler=='intel': + compiler_name='ICL' + linker_name='XILINK' + lib_name='XILIB' + else: + compiler_name='CL' + linker_name='LINK' + lib_name='LIB' + return compiler_name,linker_name,lib_name +@conf +def find_msvc(conf): + if sys.platform=='cygwin': + conf.fatal('MSVC module does not work under cygwin Python!') + v=conf.env + path=v['PATH'] + compiler=v['MSVC_COMPILER'] + version=v['MSVC_VERSION'] + compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler) + v.MSVC_MANIFEST=(compiler=='msvc'and version>=8)or(compiler=='wsdk'and version>=6)or(compiler=='intel'and version>=11) + cxx=None + if v['CXX']:cxx=v['CXX'] + elif'CXX'in conf.environ:cxx=conf.environ['CXX'] + cxx=conf.find_program(compiler_name,var='CXX',path_list=path) + cxx=conf.cmd_to_list(cxx) + env=dict(conf.environ) + if path:env.update(PATH=';'.join(path)) + if not conf.cmd_and_log(cxx+['/nologo','/help'],env=env): + conf.fatal('the msvc compiler could not be identified') + v['CC']=v['CXX']=cxx + v['CC_NAME']=v['CXX_NAME']='msvc' + if not v['LINK_CXX']: + link=conf.find_program(linker_name,path_list=path) + if link:v['LINK_CXX']=link + else:conf.fatal('%s was not found (linker)'%linker_name) + v['LINK']=link + if not v['LINK_CC']: + v['LINK_CC']=v['LINK_CXX'] + if not v['AR']: + stliblink=conf.find_program(lib_name,path_list=path,var='AR') + if not stliblink:return + v['ARFLAGS']=['/NOLOGO'] + if v.MSVC_MANIFEST: + conf.find_program('MT',path_list=path,var='MT') + v['MTFLAGS']=['/NOLOGO'] + try: + conf.load('winres') + except Errors.WafError: + warn('Resource compiler not found. 
Compiling resource file is disabled') +@conf +def visual_studio_add_flags(self): + v=self.env + try:v.prepend_value('INCLUDES',[x for x in self.environ['INCLUDE'].split(';')if x]) + except Exception:pass + try:v.prepend_value('LIBPATH',[x for x in self.environ['LIB'].split(';')if x]) + except Exception:pass +@conf +def msvc_common_flags(conf): + v=conf.env + v['DEST_BINFMT']='pe' + v.append_value('CFLAGS',['/nologo']) + v.append_value('CXXFLAGS',['/nologo']) + v['DEFINES_ST']='/D%s' + v['CC_SRC_F']='' + v['CC_TGT_F']=['/c','/Fo'] + v['CXX_SRC_F']='' + v['CXX_TGT_F']=['/c','/Fo'] + if(v.MSVC_COMPILER=='msvc'and v.MSVC_VERSION>=8)or(v.MSVC_COMPILER=='wsdk'and v.MSVC_VERSION>=6): + v['CC_TGT_F']=['/FC']+v['CC_TGT_F'] + v['CXX_TGT_F']=['/FC']+v['CXX_TGT_F'] + v['CPPPATH_ST']='/I%s' + v['AR_TGT_F']=v['CCLNK_TGT_F']=v['CXXLNK_TGT_F']='/OUT:' + v['CFLAGS_CONSOLE']=v['CXXFLAGS_CONSOLE']=['/SUBSYSTEM:CONSOLE'] + v['CFLAGS_NATIVE']=v['CXXFLAGS_NATIVE']=['/SUBSYSTEM:NATIVE'] + v['CFLAGS_POSIX']=v['CXXFLAGS_POSIX']=['/SUBSYSTEM:POSIX'] + v['CFLAGS_WINDOWS']=v['CXXFLAGS_WINDOWS']=['/SUBSYSTEM:WINDOWS'] + v['CFLAGS_WINDOWSCE']=v['CXXFLAGS_WINDOWSCE']=['/SUBSYSTEM:WINDOWSCE'] + v['CFLAGS_CRT_MULTITHREADED']=v['CXXFLAGS_CRT_MULTITHREADED']=['/MT'] + v['CFLAGS_CRT_MULTITHREADED_DLL']=v['CXXFLAGS_CRT_MULTITHREADED_DLL']=['/MD'] + v['CFLAGS_CRT_MULTITHREADED_DBG']=v['CXXFLAGS_CRT_MULTITHREADED_DBG']=['/MTd'] + v['CFLAGS_CRT_MULTITHREADED_DLL_DBG']=v['CXXFLAGS_CRT_MULTITHREADED_DLL_DBG']=['/MDd'] + v['LIB_ST']='%s.lib' + v['LIBPATH_ST']='/LIBPATH:%s' + v['STLIB_ST']='%s.lib' + v['STLIBPATH_ST']='/LIBPATH:%s' + v.append_value('LINKFLAGS',['/NOLOGO']) + if v['MSVC_MANIFEST']: + v.append_value('LINKFLAGS',['/MANIFEST']) + v['CFLAGS_cshlib']=[] + v['CXXFLAGS_cxxshlib']=[] + v['LINKFLAGS_cshlib']=v['LINKFLAGS_cxxshlib']=['/DLL'] + v['cshlib_PATTERN']=v['cxxshlib_PATTERN']='%s.dll' + v['implib_PATTERN']='%s.lib' + v['IMPLIB_ST']='/IMPLIB:%s' + v['LINKFLAGS_cstlib']=[] + v['cstlib_PATTERN']=v['cxxstlib_PATTERN']='%s.lib' + v['cprogram_PATTERN']=v['cxxprogram_PATTERN']='%s.exe' +@after_method('apply_link') +@feature('c','cxx') +def apply_flags_msvc(self): + if self.env.CC_NAME!='msvc'or not getattr(self,'link_task',None): + return + is_static=isinstance(self.link_task,ccroot.stlink_task) + subsystem=getattr(self,'subsystem','') + if subsystem: + subsystem='/subsystem:%s'%subsystem + flags=is_static and'ARFLAGS'or'LINKFLAGS' + self.env.append_value(flags,subsystem) + if not is_static: + for f in self.env.LINKFLAGS: + d=f.lower() + if d[1:]=='debug': + pdbnode=self.link_task.outputs[0].change_ext('.pdb') + self.link_task.outputs.append(pdbnode) + try: + self.install_task.source.append(pdbnode) + except AttributeError: + pass + break +@feature('cprogram','cshlib','cxxprogram','cxxshlib') +@after_method('apply_link') +def apply_manifest(self): + if self.env.CC_NAME=='msvc'and self.env.MSVC_MANIFEST and getattr(self,'link_task',None): + out_node=self.link_task.outputs[0] + man_node=out_node.parent.find_or_declare(out_node.name+'.manifest') + self.link_task.outputs.append(man_node) + self.link_task.do_manifest=True +def exec_mf(self): + env=self.env + mtool=env['MT'] + if not mtool: + return 0 + self.do_manifest=False + outfile=self.outputs[0].abspath() + manifest=None + for out_node in self.outputs: + if out_node.name.endswith('.manifest'): + manifest=out_node.abspath() + break + if manifest is None: + return 0 + mode='' + if'cprogram'in self.generator.features or'cxxprogram'in self.generator.features: + mode='1' + 
elif'cshlib'in self.generator.features or'cxxshlib'in self.generator.features: + mode='2' + debug('msvc: embedding manifest in mode %r'%mode) + lst=[] + lst.append(env['MT']) + lst.extend(Utils.to_list(env['MTFLAGS'])) + lst.extend(['-manifest',manifest]) + lst.append('-outputresource:%s;%s'%(outfile,mode)) + lst=[lst] + return self.exec_command(*lst) +def quote_response_command(self,flag): + if flag.find(' ')>-1: + for x in('/LIBPATH:','/IMPLIB:','/OUT:','/I'): + if flag.startswith(x): + flag='%s"%s"'%(x,flag[len(x):]) + break + else: + flag='"%s"'%flag + return flag +def exec_response_command(self,cmd,**kw): + try: + tmp=None + if sys.platform.startswith('win')and isinstance(cmd,list)and len(' '.join(cmd))>=8192: + program=cmd[0] + cmd=[self.quote_response_command(x)for x in cmd] + (fd,tmp)=tempfile.mkstemp() + os.write(fd,'\r\n'.join(i.replace('\\','\\\\')for i in cmd[1:])) + os.close(fd) + cmd=[program,'@'+tmp] + ret=self.generator.bld.exec_command(cmd,**kw) + finally: + if tmp: + try: + os.remove(tmp) + except OSError: + pass + return ret +def exec_command_msvc(self,*k,**kw): + if isinstance(k[0],list): + lst=[] + carry='' + for a in k[0]: + if a=='/Fo'or a=='/doc'or a[-1]==':': + carry=a + else: + lst.append(carry+a) + carry='' + k=[lst] + if self.env['PATH']: + env=dict(self.env.env or os.environ) + env.update(PATH=';'.join(self.env['PATH'])) + kw['env']=env + bld=self.generator.bld + try: + if not kw.get('cwd',None): + kw['cwd']=bld.cwd + except AttributeError: + bld.cwd=kw['cwd']=bld.variant_dir + ret=self.exec_response_command(k[0],**kw) + if not ret and getattr(self,'do_manifest',None): + ret=self.exec_mf() + return ret +def wrap_class(class_name): + cls=Task.classes.get(class_name,None) + if not cls: + return None + derived_class=type(class_name,(cls,),{}) + def exec_command(self,*k,**kw): + if self.env['CC_NAME']=='msvc': + return self.exec_command_msvc(*k,**kw) + else: + return super(derived_class,self).exec_command(*k,**kw) + derived_class.exec_command=exec_command + derived_class.exec_response_command=exec_response_command + derived_class.quote_response_command=quote_response_command + derived_class.exec_command_msvc=exec_command_msvc + derived_class.exec_mf=exec_mf + return derived_class +for k in'c cxx cprogram cxxprogram cshlib cxxshlib cstlib cxxstlib'.split(): + wrap_class(k) +def make_winapp(self,family): + append=self.env.append_unique + append('DEFINES','WINAPI_FAMILY=%s'%family) + append('CXXFLAGS','/ZW') + append('CXXFLAGS','/TP') + for lib_path in self.env.LIBPATH: + append('CXXFLAGS','/AI%s'%lib_path) +@feature('winphoneapp') +@after_method('process_use') +@after_method('propagate_uselib_vars') +def make_winphone_app(self): + make_winapp(self,'WINAPI_FAMILY_PHONE_APP') + conf.env.append_unique('LINKFLAGS','/NODEFAULTLIB:ole32.lib') + conf.env.append_unique('LINKFLAGS','PhoneAppModelHost.lib') +@feature('winapp') +@after_method('process_use') +@after_method('propagate_uselib_vars') +def make_windows_app(self): + make_winapp(self,'WINAPI_FAMILY_DESKTOP_APP') diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/nasm.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/nasm.py new file mode 100644 index 0000000..00e650a --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/nasm.py @@ -0,0 +1,16 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os +import waflib.Tools.asm +from waflib.TaskGen import feature +@feature('asm') +def apply_nasm_vars(self): + self.env.append_value('ASFLAGS',self.to_list(getattr(self,'nasm_flags',[]))) +def configure(conf): + nasm=conf.find_program(['nasm','yasm'],var='AS') + conf.env.AS_TGT_F=['-o'] + conf.env.ASLNK_TGT_F=['-o'] + conf.load('asm') + conf.env.ASMPATH_ST='-I%s'+os.sep diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/perl.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/perl.py new file mode 100644 index 0000000..8b6c2f8 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/perl.py @@ -0,0 +1,80 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os +from waflib import Task,Options,Utils +from waflib.Configure import conf +from waflib.TaskGen import extension,feature,before_method +@before_method('apply_incpaths','apply_link','propagate_uselib_vars') +@feature('perlext') +def init_perlext(self): + self.uselib=self.to_list(getattr(self,'uselib',[])) + if not'PERLEXT'in self.uselib:self.uselib.append('PERLEXT') + self.env['cshlib_PATTERN']=self.env['cxxshlib_PATTERN']=self.env['perlext_PATTERN'] +@extension('.xs') +def xsubpp_file(self,node): + outnode=node.change_ext('.c') + self.create_task('xsubpp',node,outnode) + self.source.append(outnode) +class xsubpp(Task.Task): + run_str='${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}' + color='BLUE' + ext_out=['.h'] +@conf +def check_perl_version(self,minver=None): + res=True + if minver: + cver='.'.join(map(str,minver)) + else: + cver='' + self.start_msg('Checking for minimum perl version %s'%cver) + perl=getattr(Options.options,'perlbinary',None) + if not perl: + perl=self.find_program('perl',var='PERL') + if not perl: + self.end_msg("Perl not found",color="YELLOW") + return False + self.env['PERL']=perl + version=self.cmd_and_log([perl,"-e",'printf \"%vd\", $^V']) + if not version: + res=False + version="Unknown" + elif not minver is None: + ver=tuple(map(int,version.split("."))) + if ver<minver: + res=False + self.end_msg(version,color=res and"GREEN"or"YELLOW") + return res +@conf +def check_perl_module(self,module): + cmd=[self.env['PERL'],'-e','use %s'%module] + self.start_msg('perl module %s'%module) + try: + r=self.cmd_and_log(cmd) + except Exception: + self.end_msg(False) + return None + self.end_msg(r or True) + return r +@conf +def check_perl_ext_devel(self): + env=self.env + perl=env.PERL + if not perl: + self.fatal('find perl first') + def read_out(cmd): + return Utils.to_list(self.cmd_and_log(perl+cmd)) + env['LINKFLAGS_PERLEXT']=read_out(" -MConfig -e'print $Config{lddlflags}'") + env['INCLUDES_PERLEXT']=read_out(" -MConfig -e'print \"$Config{archlib}/CORE\"'") + env['CFLAGS_PERLEXT']=read_out(" -MConfig -e'print \"$Config{ccflags} $Config{cccdlflags}\"'") + env['XSUBPP']=read_out(" -MConfig -e'print \"$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}\"'") + env['EXTUTILS_TYPEMAP']=read_out(" -MConfig -e'print \"$Config{privlib}/ExtUtils/typemap\"'") + if not getattr(Options.options,'perlarchdir',None): + env['ARCHDIR_PERL']=self.cmd_and_log(perl+" -MConfig -e'print $Config{sitearch}'") + else: + env['ARCHDIR_PERL']=getattr(Options.options,'perlarchdir') + env['perlext_PATTERN']='%s.'+self.cmd_and_log(perl+" -MConfig -e'print $Config{dlext}'") +def 
options(opt): + opt.add_option('--with-perl-binary',type='string',dest='perlbinary',help='Specify alternate perl binary',default=None) + opt.add_option('--with-perl-archdir',type='string',dest='perlarchdir',help='Specify directory where to install arch specific files',default=None) diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/python.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/python.py new file mode 100644 index 0000000..713f816 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/python.py @@ -0,0 +1,345 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys +from waflib import Utils,Options,Errors,Logs +from waflib.TaskGen import extension,before_method,after_method,feature +from waflib.Configure import conf +FRAG=''' +#include <Python.h> +#ifdef __cplusplus +extern "C" { +#endif + void Py_Initialize(void); + void Py_Finalize(void); +#ifdef __cplusplus +} +#endif +int main(int argc, char **argv) +{ + (void)argc; (void)argv; + Py_Initialize(); + Py_Finalize(); + return 0; +} +''' +INST=''' +import sys, py_compile +py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3]) +''' +DISTUTILS_IMP=['from distutils.sysconfig import get_config_var, get_python_lib'] +@extension('.py') +def process_py(self,node): + try: + if not self.bld.is_install: + return + except AttributeError: + return + try: + if not self.install_path: + return + except AttributeError: + self.install_path='${PYTHONDIR}' + def inst_py(ctx): + install_from=getattr(self,'install_from',None) + if install_from: + install_from=self.path.find_dir(install_from) + install_pyfile(self,node,install_from) + self.bld.add_post_fun(inst_py) +def install_pyfile(self,node,install_from=None): + from_node=install_from or node.parent + tsk=self.bld.install_as(self.install_path+'/'+node.path_from(from_node),node,postpone=False) + path=tsk.get_install_path() + if self.bld.is_install<0: + Logs.info("+ removing byte compiled python files") + for x in'co': + try: + os.remove(path+x) + except OSError: + pass + if self.bld.is_install>0: + try: + st1=os.stat(path) + except OSError: + Logs.error('The python file is missing, this should not happen') + for x in['c','o']: + do_inst=self.env['PY'+x.upper()] + try: + st2=os.stat(path+x) + except OSError: + pass + else: + if st1.st_mtime<=st2.st_mtime: + do_inst=False + if do_inst: + lst=(x=='o')and[self.env['PYFLAGS_OPT']]or[] + (a,b,c)=(path,path+x,tsk.get_install_path(destdir=False)+x) + argv=self.env['PYTHON']+lst+['-c',INST,a,b,c] + Logs.info('+ byte compiling %r'%(path+x)) + env=self.env.env or None + ret=Utils.subprocess.Popen(argv,env=env).wait() + if ret: + raise Errors.WafError('py%s compilation failed %r'%(x,path)) +@feature('py') +def feature_py(self): + pass +@feature('pyext') +@before_method('propagate_uselib_vars','apply_link') +@after_method('apply_bundle') +def init_pyext(self): + self.uselib=self.to_list(getattr(self,'uselib',[])) + if not'PYEXT'in self.uselib: + self.uselib.append('PYEXT') + self.env.cshlib_PATTERN=self.env.cxxshlib_PATTERN=self.env.macbundle_PATTERN=self.env.pyext_PATTERN + self.env.fcshlib_PATTERN=self.env.dshlib_PATTERN=self.env.pyext_PATTERN + try: + if not self.install_path: + return + except AttributeError: + self.install_path='${PYTHONARCHDIR}' +@feature('pyext') +@before_method('apply_link','apply_bundle') +def set_bundle(self): + if Utils.unversioned_sys_platform()=='darwin': + 
self.mac_bundle=True +@before_method('propagate_uselib_vars') +@feature('pyembed') +def init_pyembed(self): + self.uselib=self.to_list(getattr(self,'uselib',[])) + if not'PYEMBED'in self.uselib: + self.uselib.append('PYEMBED') +@conf +def get_python_variables(self,variables,imports=None): + if not imports: + try: + imports=self.python_imports + except AttributeError: + imports=DISTUTILS_IMP + program=list(imports) + program.append('') + for v in variables: + program.append("print(repr(%s))"%v) + os_env=dict(os.environ) + try: + del os_env['MACOSX_DEPLOYMENT_TARGET'] + except KeyError: + pass + try: + out=self.cmd_and_log(self.env.PYTHON+['-c','\n'.join(program)],env=os_env) + except Errors.WafError: + self.fatal('The distutils module is unusable: install "python-devel"?') + self.to_log(out) + return_values=[] + for s in out.split('\n'): + s=s.strip() + if not s: + continue + if s=='None': + return_values.append(None) + elif(s[0]=="'"and s[-1]=="'")or(s[0]=='"'and s[-1]=='"'): + return_values.append(eval(s)) + elif s[0].isdigit(): + return_values.append(int(s)) + else:break + return return_values +@conf +def check_python_headers(conf): + env=conf.env + if not env['CC_NAME']and not env['CXX_NAME']: + conf.fatal('load a compiler first (gcc, g++, ..)') + if not env['PYTHON_VERSION']: + conf.check_python_version() + pybin=conf.env.PYTHON + if not pybin: + conf.fatal('Could not find the python executable') + v='prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split() + try: + lst=conf.get_python_variables(["get_config_var('%s') or ''"%x for x in v]) + except RuntimeError: + conf.fatal("Python development headers not found (-v for details).") + vals=['%s = %r'%(x,y)for(x,y)in zip(v,lst)] + conf.to_log("Configuration returned from %r:\n%r\n"%(pybin,'\n'.join(vals))) + dct=dict(zip(v,lst)) + x='MACOSX_DEPLOYMENT_TARGET' + if dct[x]: + conf.env[x]=conf.environ[x]=dct[x] + env['pyext_PATTERN']='%s'+dct['SO'] + all_flags=dct['LDFLAGS']+' '+dct['CFLAGS'] + conf.parse_flags(all_flags,'PYEMBED') + all_flags=dct['LDFLAGS']+' '+dct['LDSHARED']+' '+dct['CFLAGS'] + conf.parse_flags(all_flags,'PYEXT') + result=None + if not dct["LDVERSION"]: + dct["LDVERSION"]=env['PYTHON_VERSION'] + for name in('python'+dct['LDVERSION'],'python'+env['PYTHON_VERSION']+'m','python'+env['PYTHON_VERSION'].replace('.','')): + if not result and env['LIBPATH_PYEMBED']: + path=env['LIBPATH_PYEMBED'] + conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n"%path) + result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in LIBPATH_PYEMBED'%name) + if not result and dct['LIBDIR']: + path=[dct['LIBDIR']] + conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n"%path) + result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in LIBDIR'%name) + if not result and dct['LIBPL']: + path=[dct['LIBPL']] + conf.to_log("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n") + result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in python_LIBPL'%name) + if not result: + path=[os.path.join(dct['prefix'],"libs")] + conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n") + result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in $prefix/libs'%name) + if result: + break + if result: + 
env['LIBPATH_PYEMBED']=path + env.append_value('LIB_PYEMBED',[name]) + else: + conf.to_log("\n\n### LIB NOT FOUND\n") + if(Utils.is_win32 or sys.platform.startswith('os2')or dct['Py_ENABLE_SHARED']): + env['LIBPATH_PYEXT']=env['LIBPATH_PYEMBED'] + env['LIB_PYEXT']=env['LIB_PYEMBED'] + num='.'.join(env['PYTHON_VERSION'].split('.')[:2]) + conf.find_program([''.join(pybin)+'-config','python%s-config'%num,'python-config-%s'%num,'python%sm-config'%num],var='PYTHON_CONFIG',mandatory=False) + includes=[] + if conf.env.PYTHON_CONFIG: + for incstr in conf.cmd_and_log([conf.env.PYTHON_CONFIG,'--includes']).strip().split(): + if(incstr.startswith('-I')or incstr.startswith('/I')): + incstr=incstr[2:] + if incstr not in includes: + includes.append(incstr) + conf.to_log("Include path for Python extensions (found via python-config --includes): %r\n"%(includes,)) + env['INCLUDES_PYEXT']=includes + env['INCLUDES_PYEMBED']=includes + else: + conf.to_log("Include path for Python extensions ""(found via distutils module): %r\n"%(dct['INCLUDEPY'],)) + env['INCLUDES_PYEXT']=[dct['INCLUDEPY']] + env['INCLUDES_PYEMBED']=[dct['INCLUDEPY']] + if env['CC_NAME']=='gcc': + env.append_value('CFLAGS_PYEMBED',['-fno-strict-aliasing']) + env.append_value('CFLAGS_PYEXT',['-fno-strict-aliasing']) + if env['CXX_NAME']=='gcc': + env.append_value('CXXFLAGS_PYEMBED',['-fno-strict-aliasing']) + env.append_value('CXXFLAGS_PYEXT',['-fno-strict-aliasing']) + if env.CC_NAME=="msvc": + from distutils.msvccompiler import MSVCCompiler + dist_compiler=MSVCCompiler() + dist_compiler.initialize() + env.append_value('CFLAGS_PYEXT',dist_compiler.compile_options) + env.append_value('CXXFLAGS_PYEXT',dist_compiler.compile_options) + env.append_value('LINKFLAGS_PYEXT',dist_compiler.ldflags_shared) + try: + conf.check(header_name='Python.h',define_name='HAVE_PYTHON_H',uselib='PYEMBED',fragment=FRAG,errmsg=':-(') + except conf.errors.ConfigurationError: + xx=conf.env.CXX_NAME and'cxx'or'c' + flags=['--cflags','--libs','--ldflags'] + for f in flags: + conf.check_cfg(msg='Asking python-config for pyembed %s flags'%f,path=conf.env.PYTHON_CONFIG,package='',uselib_store='PYEMBED',args=[f]) + conf.check(header_name='Python.h',define_name='HAVE_PYTHON_H',msg='Getting pyembed flags from python-config',fragment=FRAG,errmsg='Could not build a python embedded interpreter',features='%s %sprogram pyembed'%(xx,xx)) + for f in flags: + conf.check_cfg(msg='Asking python-config for pyext %s flags'%f,path=conf.env.PYTHON_CONFIG,package='',uselib_store='PYEXT',args=[f]) + conf.check(header_name='Python.h',define_name='HAVE_PYTHON_H',msg='Getting pyext flags from python-config',features='%s %sshlib pyext'%(xx,xx),fragment=FRAG,errmsg='Could not build python extensions') +@conf +def check_python_version(conf,minver=None): + assert minver is None or isinstance(minver,tuple) + pybin=conf.env['PYTHON'] + if not pybin: + conf.fatal('could not find the python executable') + cmd=pybin+['-c','import sys\nfor x in sys.version_info: print(str(x))'] + Logs.debug('python: Running python command %r'%cmd) + lines=conf.cmd_and_log(cmd).split() + assert len(lines)==5,"found %i lines, expected 5: %r"%(len(lines),lines) + pyver_tuple=(int(lines[0]),int(lines[1]),int(lines[2]),lines[3],int(lines[4])) + result=(minver is None)or(pyver_tuple>=minver) + if result: + pyver='.'.join([str(x)for x in pyver_tuple[:2]]) + conf.env['PYTHON_VERSION']=pyver + if'PYTHONDIR'in conf.environ: + pydir=conf.environ['PYTHONDIR'] + else: + if Utils.is_win32: + 
(python_LIBDEST,pydir)=conf.get_python_variables(["get_config_var('LIBDEST') or ''","get_python_lib(standard_lib=0, prefix=%r) or ''"%conf.env['PREFIX']]) + else: + python_LIBDEST=None + (pydir,)=conf.get_python_variables(["get_python_lib(standard_lib=0, prefix=%r) or ''"%conf.env['PREFIX']]) + if python_LIBDEST is None: + if conf.env['LIBDIR']: + python_LIBDEST=os.path.join(conf.env['LIBDIR'],"python"+pyver) + else: + python_LIBDEST=os.path.join(conf.env['PREFIX'],"lib","python"+pyver) + if'PYTHONARCHDIR'in conf.environ: + pyarchdir=conf.environ['PYTHONARCHDIR'] + else: + (pyarchdir,)=conf.get_python_variables(["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''"%conf.env['PREFIX']]) + if not pyarchdir: + pyarchdir=pydir + if hasattr(conf,'define'): + conf.define('PYTHONDIR',pydir) + conf.define('PYTHONARCHDIR',pyarchdir) + conf.env['PYTHONDIR']=pydir + conf.env['PYTHONARCHDIR']=pyarchdir + pyver_full='.'.join(map(str,pyver_tuple[:3])) + if minver is None: + conf.msg('Checking for python version',pyver_full) + else: + minver_str='.'.join(map(str,minver)) + conf.msg('Checking for python version',pyver_tuple,">= %s"%(minver_str,)and'GREEN'or'YELLOW') + if not result: + conf.fatal('The python version is too old, expecting %r'%(minver,)) +PYTHON_MODULE_TEMPLATE=''' +import %s as current_module +version = getattr(current_module, '__version__', None) +if version is not None: + print(str(version)) +else: + print('unknown version') +''' +@conf +def check_python_module(conf,module_name,condition=''): + msg='Python module %s'%module_name + if condition: + msg='%s (%s)'%(msg,condition) + conf.start_msg(msg) + try: + ret=conf.cmd_and_log(conf.env['PYTHON']+['-c',PYTHON_MODULE_TEMPLATE%module_name]) + except Exception: + conf.end_msg(False) + conf.fatal('Could not find the python module %r'%module_name) + ret=ret.strip() + if condition: + conf.end_msg(ret) + if ret=='unknown version': + conf.fatal('Could not check the %s version'%module_name) + from distutils.version import LooseVersion + def num(*k): + if isinstance(k[0],int): + return LooseVersion('.'.join([str(x)for x in k])) + else: + return LooseVersion(k[0]) + d={'num':num,'ver':LooseVersion(ret)} + ev=eval(condition,{},d) + if not ev: + conf.fatal('The %s version does not satisfy the requirements'%module_name) + else: + if ret=='unknown version': + conf.end_msg(True) + else: + conf.end_msg(ret) +def configure(conf): + try: + conf.find_program('python',var='PYTHON') + except conf.errors.ConfigurationError: + Logs.warn("could not find a python executable, setting to sys.executable '%s'"%sys.executable) + conf.env.PYTHON=sys.executable + if conf.env.PYTHON!=sys.executable: + Logs.warn("python executable %r differs from system %r"%(conf.env.PYTHON,sys.executable)) + conf.env.PYTHON=conf.cmd_to_list(conf.env.PYTHON) + v=conf.env + v['PYCMD']='"import sys, py_compile;py_compile.compile(sys.argv[1], sys.argv[2])"' + v['PYFLAGS']='' + v['PYFLAGS_OPT']='-O' + v['PYC']=getattr(Options.options,'pyc',1) + v['PYO']=getattr(Options.options,'pyo',1) +def options(opt): + opt.add_option('--nopyc',action='store_false',default=1,help='Do not install bytecode compiled .pyc files (configuration) [Default:install]',dest='pyc') + opt.add_option('--nopyo',action='store_false',default=1,help='Do not install optimised compiled .pyo files (configuration) [Default:install]',dest='pyo') diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/qt4.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/qt4.py new file mode 100644 index 
0000000..7926a89 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/qt4.py @@ -0,0 +1,455 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +try: + from xml.sax import make_parser + from xml.sax.handler import ContentHandler +except ImportError: + has_xml=False + ContentHandler=object +else: + has_xml=True +import os,sys +from waflib.Tools import c_preproc,cxx +from waflib import Task,Utils,Options,Errors +from waflib.TaskGen import feature,after_method,extension +from waflib.Configure import conf +from waflib import Logs +MOC_H=['.h','.hpp','.hxx','.hh'] +EXT_RCC=['.qrc'] +EXT_UI=['.ui'] +EXT_QT4=['.cpp','.cc','.cxx','.C'] +QT4_LIBS="QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtXmlPatterns QtWebKit Qt3Support QtHelp QtScript QtDeclarative QtDesigner" +class qxx(Task.classes['cxx']): + def __init__(self,*k,**kw): + Task.Task.__init__(self,*k,**kw) + self.moc_done=0 + def scan(self): + (nodes,names)=c_preproc.scan(self) + lst=[] + for x in nodes: + if x.name.endswith('.moc'): + s=x.path_from(self.inputs[0].parent.get_bld()) + if s not in names: + names.append(s) + else: + lst.append(x) + return(lst,names) + def runnable_status(self): + if self.moc_done: + return Task.Task.runnable_status(self) + else: + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + self.add_moc_tasks() + return Task.Task.runnable_status(self) + def create_moc_task(self,h_node,m_node): + try: + moc_cache=self.generator.bld.moc_cache + except AttributeError: + moc_cache=self.generator.bld.moc_cache={} + try: + return moc_cache[h_node] + except KeyError: + tsk=moc_cache[h_node]=Task.classes['moc'](env=self.env,generator=self.generator) + tsk.set_inputs(h_node) + tsk.set_outputs(m_node) + gen=self.generator.bld.producer + gen.outstanding.insert(0,tsk) + gen.total+=1 + return tsk + def add_moc_tasks(self): + node=self.inputs[0] + bld=self.generator.bld + try: + self.signature() + except KeyError: + pass + else: + delattr(self,'cache_sig') + moctasks=[] + mocfiles=[] + try: + tmp_lst=bld.raw_deps[self.uid()] + bld.raw_deps[self.uid()]=[] + except KeyError: + tmp_lst=[] + for d in tmp_lst: + if not d.endswith('.moc'): + continue + if d in mocfiles: + Logs.error("paranoia owns") + continue + mocfiles.append(d) + h_node=None + try:ext=Options.options.qt_header_ext.split() + except AttributeError:pass + if not ext:ext=MOC_H + base2=d[:-4] + for x in[node.parent]+self.generator.includes_nodes: + for e in ext: + h_node=x.find_node(base2+e) + if h_node: + break + if h_node: + m_node=h_node.change_ext('.moc') + break + else: + for k in EXT_QT4: + if base2.endswith(k): + for x in[node.parent]+self.generator.includes_nodes: + h_node=x.find_node(base2) + if h_node: + break + if h_node: + m_node=h_node.change_ext(k+'.moc') + break + if not h_node: + raise Errors.WafError('no header found for %r which is a moc file'%d) + bld.node_deps[(self.inputs[0].parent.abspath(),m_node.name)]=h_node + task=self.create_moc_task(h_node,m_node) + moctasks.append(task) + tmp_lst=bld.raw_deps[self.uid()]=mocfiles + lst=bld.node_deps.get(self.uid(),()) + for d in lst: + name=d.name + if name.endswith('.moc'): + task=self.create_moc_task(bld.node_deps[(self.inputs[0].parent.abspath(),name)],d) + moctasks.append(task) + self.run_after.update(set(moctasks)) + self.moc_done=1 + run=Task.classes['cxx'].__dict__['run'] +class trans_update(Task.Task): + run_str='${QT_LUPDATE} ${SRC} -ts 
${TGT}' + color='BLUE' +Task.update_outputs(trans_update) +class XMLHandler(ContentHandler): + def __init__(self): + self.buf=[] + self.files=[] + def startElement(self,name,attrs): + if name=='file': + self.buf=[] + def endElement(self,name): + if name=='file': + self.files.append(str(''.join(self.buf))) + def characters(self,cars): + self.buf.append(cars) +@extension(*EXT_RCC) +def create_rcc_task(self,node): + rcnode=node.change_ext('_rc.cpp') + rcctask=self.create_task('rcc',node,rcnode) + cpptask=self.create_task('cxx',rcnode,rcnode.change_ext('.o')) + try: + self.compiled_tasks.append(cpptask) + except AttributeError: + self.compiled_tasks=[cpptask] + return cpptask +@extension(*EXT_UI) +def create_uic_task(self,node): + uictask=self.create_task('ui4',node) + uictask.outputs=[self.path.find_or_declare(self.env['ui_PATTERN']%node.name[:-3])] +@extension('.ts') +def add_lang(self,node): + self.lang=self.to_list(getattr(self,'lang',[]))+[node] +@feature('qt4') +@after_method('apply_link') +def apply_qt4(self): + if getattr(self,'lang',None): + qmtasks=[] + for x in self.to_list(self.lang): + if isinstance(x,str): + x=self.path.find_resource(x+'.ts') + qmtasks.append(self.create_task('ts2qm',x,x.change_ext('.qm'))) + if getattr(self,'update',None)and Options.options.trans_qt4: + cxxnodes=[a.inputs[0]for a in self.compiled_tasks]+[a.inputs[0]for a in self.tasks if getattr(a,'inputs',None)and a.inputs[0].name.endswith('.ui')] + for x in qmtasks: + self.create_task('trans_update',cxxnodes,x.inputs) + if getattr(self,'langname',None): + qmnodes=[x.outputs[0]for x in qmtasks] + rcnode=self.langname + if isinstance(rcnode,str): + rcnode=self.path.find_or_declare(rcnode+'.qrc') + t=self.create_task('qm2rcc',qmnodes,rcnode) + k=create_rcc_task(self,t.outputs[0]) + self.link_task.inputs.append(k.outputs[0]) + lst=[] + for flag in self.to_list(self.env['CXXFLAGS']): + if len(flag)<2:continue + f=flag[0:2] + if f in['-D','-I','/D','/I']: + if(f[0]=='/'): + lst.append('-'+flag[1:]) + else: + lst.append(flag) + self.env.append_value('MOC_FLAGS',lst) +@extension(*EXT_QT4) +def cxx_hook(self,node): + return self.create_compiled_task('qxx',node) +class rcc(Task.Task): + color='BLUE' + run_str='${QT_RCC} -name ${SRC[0].name} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}' + ext_out=['.h'] + def scan(self): + node=self.inputs[0] + if not has_xml: + Logs.error('no xml support was found, the rcc dependencies will be incomplete!') + return([],[]) + parser=make_parser() + curHandler=XMLHandler() + parser.setContentHandler(curHandler) + fi=open(self.inputs[0].abspath(),'r') + try: + parser.parse(fi) + finally: + fi.close() + nodes=[] + names=[] + root=self.inputs[0].parent + for x in curHandler.files: + nd=root.find_resource(x) + if nd:nodes.append(nd) + else:names.append(x) + return(nodes,names) +class moc(Task.Task): + color='BLUE' + run_str='${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}' +class ui4(Task.Task): + color='BLUE' + run_str='${QT_UIC} ${SRC} -o ${TGT}' + ext_out=['.h'] +class ts2qm(Task.Task): + color='BLUE' + run_str='${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}' +class qm2rcc(Task.Task): + color='BLUE' + after='ts2qm' + def run(self): + txt='\n'.join(['<file>%s</file>'%k.path_from(self.outputs[0].parent)for k in self.inputs]) + code='<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>'%txt + self.outputs[0].write(code) +def configure(self): + self.find_qt4_binaries() + self.set_qt4_libs_to_check() + 
self.set_qt4_defines() + self.find_qt4_libraries() + self.add_qt4_rpath() + self.simplify_qt4_libs() +@conf +def find_qt4_binaries(self): + env=self.env + opt=Options.options + qtdir=getattr(opt,'qtdir','') + qtbin=getattr(opt,'qtbin','') + paths=[] + if qtdir: + qtbin=os.path.join(qtdir,'bin') + if not qtdir: + qtdir=os.environ.get('QT4_ROOT','') + qtbin=os.environ.get('QT4_BIN',None)or os.path.join(qtdir,'bin') + if qtbin: + paths=[qtbin] + if not qtdir: + paths=os.environ.get('PATH','').split(os.pathsep) + paths.append('/usr/share/qt4/bin/') + try: + lst=Utils.listdir('/usr/local/Trolltech/') + except OSError: + pass + else: + if lst: + lst.sort() + lst.reverse() + qtdir='/usr/local/Trolltech/%s/'%lst[0] + qtbin=os.path.join(qtdir,'bin') + paths.append(qtbin) + cand=None + prev_ver=['4','0','0'] + for qmk in['qmake-qt4','qmake4','qmake']: + try: + qmake=self.find_program(qmk,path_list=paths) + except self.errors.ConfigurationError: + pass + else: + try: + version=self.cmd_and_log([qmake,'-query','QT_VERSION']).strip() + except self.errors.WafError: + pass + else: + if version: + new_ver=version.split('.') + if new_ver>prev_ver: + cand=qmake + prev_ver=new_ver + if cand: + self.env.QMAKE=cand + else: + self.fatal('Could not find qmake for qt4') + qtbin=self.cmd_and_log([self.env.QMAKE,'-query','QT_INSTALL_BINS']).strip()+os.sep + def find_bin(lst,var): + if var in env: + return + for f in lst: + try: + ret=self.find_program(f,path_list=paths) + except self.errors.ConfigurationError: + pass + else: + env[var]=ret + break + find_bin(['uic-qt3','uic3'],'QT_UIC3') + find_bin(['uic-qt4','uic'],'QT_UIC') + if not env['QT_UIC']: + self.fatal('cannot find the uic compiler for qt4') + try: + uicver=self.cmd_and_log(env['QT_UIC']+" -version 2>&1").strip() + except self.errors.ConfigurationError: + self.fatal('this uic compiler is for qt3, add uic for qt4 to your path') + uicver=uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt','') + self.msg('Checking for uic version','%s'%uicver) + if uicver.find(' 3.')!=-1: + self.fatal('this uic compiler is for qt3, add uic for qt4 to your path') + find_bin(['moc-qt4','moc'],'QT_MOC') + find_bin(['rcc-qt4','rcc'],'QT_RCC') + find_bin(['lrelease-qt4','lrelease'],'QT_LRELEASE') + find_bin(['lupdate-qt4','lupdate'],'QT_LUPDATE') + env['UIC3_ST']='%s -o %s' + env['UIC_ST']='%s -o %s' + env['MOC_ST']='-o' + env['ui_PATTERN']='ui_%s.h' + env['QT_LRELEASE_FLAGS']=['-silent'] + env.MOCCPPPATH_ST='-I%s' + env.MOCDEFINES_ST='-D%s' +@conf +def find_qt4_libraries(self): + qtlibs=getattr(Options.options,'qtlibs',None)or os.environ.get("QT4_LIBDIR",None) + if not qtlibs: + try: + qtlibs=self.cmd_and_log([self.env.QMAKE,'-query','QT_INSTALL_LIBS']).strip() + except Errors.WafError: + qtdir=self.cmd_and_log([self.env.QMAKE,'-query','QT_INSTALL_PREFIX']).strip()+os.sep + qtlibs=os.path.join(qtdir,'lib') + self.msg('Found the Qt4 libraries in',qtlibs) + qtincludes=os.environ.get("QT4_INCLUDES",None)or self.cmd_and_log([self.env.QMAKE,'-query','QT_INSTALL_HEADERS']).strip() + env=self.env + if not'PKG_CONFIG_PATH'in os.environ: + os.environ['PKG_CONFIG_PATH']='%s:%s/pkgconfig:/usr/lib/qt4/lib/pkgconfig:/opt/qt4/lib/pkgconfig:/usr/lib/qt4/lib:/opt/qt4/lib'%(qtlibs,qtlibs) + try: + if os.environ.get("QT4_XCOMPILE",None): + raise self.errors.ConfigurationError() + self.check_cfg(atleast_pkgconfig_version='0.1') + except self.errors.ConfigurationError: + for i in self.qt4_vars: + uselib=i.upper() + if Utils.unversioned_sys_platform()=="darwin": 
+ frameworkName=i+".framework" + qtDynamicLib=os.path.join(qtlibs,frameworkName,i) + if os.path.exists(qtDynamicLib): + env.append_unique('FRAMEWORK_'+uselib,i) + self.msg('Checking for %s'%i,qtDynamicLib,'GREEN') + else: + self.msg('Checking for %s'%i,False,'YELLOW') + env.append_unique('INCLUDES_'+uselib,os.path.join(qtlibs,frameworkName,'Headers')) + elif env.DEST_OS!="win32": + qtDynamicLib=os.path.join(qtlibs,"lib"+i+".so") + qtStaticLib=os.path.join(qtlibs,"lib"+i+".a") + if os.path.exists(qtDynamicLib): + env.append_unique('LIB_'+uselib,i) + self.msg('Checking for %s'%i,qtDynamicLib,'GREEN') + elif os.path.exists(qtStaticLib): + env.append_unique('LIB_'+uselib,i) + self.msg('Checking for %s'%i,qtStaticLib,'GREEN') + else: + self.msg('Checking for %s'%i,False,'YELLOW') + env.append_unique('LIBPATH_'+uselib,qtlibs) + env.append_unique('INCLUDES_'+uselib,qtincludes) + env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i)) + else: + for k in("lib%s.a","lib%s4.a","%s.lib","%s4.lib"): + lib=os.path.join(qtlibs,k%i) + if os.path.exists(lib): + env.append_unique('LIB_'+uselib,i+k[k.find("%s")+2:k.find('.')]) + self.msg('Checking for %s'%i,lib,'GREEN') + break + else: + self.msg('Checking for %s'%i,False,'YELLOW') + env.append_unique('LIBPATH_'+uselib,qtlibs) + env.append_unique('INCLUDES_'+uselib,qtincludes) + env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i)) + uselib=i.upper()+"_debug" + for k in("lib%sd.a","lib%sd4.a","%sd.lib","%sd4.lib"): + lib=os.path.join(qtlibs,k%i) + if os.path.exists(lib): + env.append_unique('LIB_'+uselib,i+k[k.find("%s")+2:k.find('.')]) + self.msg('Checking for %s'%i,lib,'GREEN') + break + else: + self.msg('Checking for %s'%i,False,'YELLOW') + env.append_unique('LIBPATH_'+uselib,qtlibs) + env.append_unique('INCLUDES_'+uselib,qtincludes) + env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i)) + else: + for i in self.qt4_vars_debug+self.qt4_vars: + self.check_cfg(package=i,args='--cflags --libs',mandatory=False) +@conf +def simplify_qt4_libs(self): + env=self.env + def process_lib(vars_,coreval): + for d in vars_: + var=d.upper() + if var=='QTCORE': + continue + value=env['LIBPATH_'+var] + if value: + core=env[coreval] + accu=[] + for lib in value: + if lib in core: + continue + accu.append(lib) + env['LIBPATH_'+var]=accu + process_lib(self.qt4_vars,'LIBPATH_QTCORE') + process_lib(self.qt4_vars_debug,'LIBPATH_QTCORE_DEBUG') +@conf +def add_qt4_rpath(self): + env=self.env + if getattr(Options.options,'want_rpath',False): + def process_rpath(vars_,coreval): + for d in vars_: + var=d.upper() + value=env['LIBPATH_'+var] + if value: + core=env[coreval] + accu=[] + for lib in value: + if var!='QTCORE': + if lib in core: + continue + accu.append('-Wl,--rpath='+lib) + env['RPATH_'+var]=accu + process_rpath(self.qt4_vars,'LIBPATH_QTCORE') + process_rpath(self.qt4_vars_debug,'LIBPATH_QTCORE_DEBUG') +@conf +def set_qt4_libs_to_check(self): + if not hasattr(self,'qt4_vars'): + self.qt4_vars=QT4_LIBS + self.qt4_vars=Utils.to_list(self.qt4_vars) + if not hasattr(self,'qt4_vars_debug'): + self.qt4_vars_debug=[a+'_debug'for a in self.qt4_vars] + self.qt4_vars_debug=Utils.to_list(self.qt4_vars_debug) +@conf +def set_qt4_defines(self): + if sys.platform!='win32': + return + for x in self.qt4_vars: + y=x[2:].upper() + self.env.append_unique('DEFINES_%s'%x.upper(),'QT_%s_LIB'%y) + self.env.append_unique('DEFINES_%s_DEBUG'%x.upper(),'QT_%s_LIB'%y) +def options(opt): + 
opt.add_option('--want-rpath',action='store_true',default=False,dest='want_rpath',help='enable the rpath for qt libraries') + opt.add_option('--header-ext',type='string',default='',help='header extension for moc files',dest='qt_header_ext') + for i in'qtdir qtbin qtlibs'.split(): + opt.add_option('--'+i,type='string',default='',dest=i) + opt.add_option('--translate',action="store_true",help="collect translation strings",dest="trans_qt4",default=False) diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ruby.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ruby.py new file mode 100644 index 0000000..04cddfb --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ruby.py @@ -0,0 +1,103 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os +from waflib import Task,Options,Utils +from waflib.TaskGen import before_method,feature,after_method,Task,extension +from waflib.Configure import conf +@feature('rubyext') +@before_method('apply_incpaths','apply_lib_vars','apply_bundle','apply_link') +def init_rubyext(self): + self.install_path='${ARCHDIR_RUBY}' + self.uselib=self.to_list(getattr(self,'uselib','')) + if not'RUBY'in self.uselib: + self.uselib.append('RUBY') + if not'RUBYEXT'in self.uselib: + self.uselib.append('RUBYEXT') +@feature('rubyext') +@before_method('apply_link','propagate_uselib') +def apply_ruby_so_name(self): + self.env['cshlib_PATTERN']=self.env['cxxshlib_PATTERN']=self.env['rubyext_PATTERN'] +@conf +def check_ruby_version(self,minver=()): + if Options.options.rubybinary: + self.env.RUBY=Options.options.rubybinary + else: + self.find_program('ruby',var='RUBY') + ruby=self.env.RUBY + try: + version=self.cmd_and_log([ruby,'-e','puts defined?(VERSION) ? 
VERSION : RUBY_VERSION']).strip() + except Exception: + self.fatal('could not determine ruby version') + self.env.RUBY_VERSION=version + try: + ver=tuple(map(int,version.split("."))) + except Exception: + self.fatal('unsupported ruby version %r'%version) + cver='' + if minver: + if ver<minver: + self.fatal('ruby is too old %r'%ver) + cver='.'.join([str(x)for x in minver]) + else: + cver=ver + self.msg('Checking for ruby version %s'%str(minver or''),cver) +@conf +def check_ruby_ext_devel(self): + if not self.env.RUBY: + self.fatal('ruby detection is required first') + if not self.env.CC_NAME and not self.env.CXX_NAME: + self.fatal('load a c/c++ compiler first') + version=tuple(map(int,self.env.RUBY_VERSION.split("."))) + def read_out(cmd): + return Utils.to_list(self.cmd_and_log([self.env.RUBY,'-rrbconfig','-e',cmd])) + def read_config(key): + return read_out('puts Config::CONFIG[%r]'%key) + ruby=self.env['RUBY'] + archdir=read_config('archdir') + cpppath=archdir + if version>=(1,9,0): + ruby_hdrdir=read_config('rubyhdrdir') + cpppath+=ruby_hdrdir + cpppath+=[os.path.join(ruby_hdrdir[0],read_config('arch')[0])] + self.check(header_name='ruby.h',includes=cpppath,errmsg='could not find ruby header file') + self.env.LIBPATH_RUBYEXT=read_config('libdir') + self.env.LIBPATH_RUBYEXT+=archdir + self.env.INCLUDES_RUBYEXT=cpppath + self.env.CFLAGS_RUBYEXT=read_config('CCDLFLAGS') + self.env.rubyext_PATTERN='%s.'+read_config('DLEXT')[0] + flags=read_config('LDSHARED') + while flags and flags[0][0]!='-': + flags=flags[1:] + if len(flags)>1 and flags[1]=="ppc": + flags=flags[2:] + self.env.LINKFLAGS_RUBYEXT=flags + self.env.LINKFLAGS_RUBYEXT+=read_config('LIBS') + self.env.LINKFLAGS_RUBYEXT+=read_config('LIBRUBYARG_SHARED') + if Options.options.rubyarchdir: + self.env.ARCHDIR_RUBY=Options.options.rubyarchdir + else: + self.env.ARCHDIR_RUBY=read_config('sitearchdir')[0] + if Options.options.rubylibdir: + self.env.LIBDIR_RUBY=Options.options.rubylibdir + else: + self.env.LIBDIR_RUBY=read_config('sitelibdir')[0] +@conf +def check_ruby_module(self,module_name): + self.start_msg('Ruby module %s'%module_name) + try: + self.cmd_and_log([self.env['RUBY'],'-e','require \'%s\';puts 1'%module_name]) + except Exception: + self.end_msg(False) + self.fatal('Could not find the ruby module %r'%module_name) + self.end_msg(True) +@extension('.rb') +def process(self,node): + tsk=self.create_task('run_ruby',node) +class run_ruby(Task.Task): + run_str='${RUBY} ${RBFLAGS} -I ${SRC[0].parent.abspath()} ${SRC}' +def options(opt): + opt.add_option('--with-ruby-archdir',type='string',dest='rubyarchdir',help='Specify directory where to install arch specific files') + opt.add_option('--with-ruby-libdir',type='string',dest='rubylibdir',help='Specify alternate ruby library path') + opt.add_option('--with-ruby-binary',type='string',dest='rubybinary',help='Specify alternate ruby binary') diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/suncc.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/suncc.py new file mode 100644 index 0000000..48f4943 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/suncc.py @@ -0,0 +1,54 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os +from waflib import Utils +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_scc(conf): + v=conf.env + cc=None + if v['CC']:cc=v['CC'] + elif'CC'in conf.environ:cc=conf.environ['CC'] + if not cc:cc=conf.find_program('cc',var='CC') + if not cc:conf.fatal('Could not find a Sun C compiler') + cc=conf.cmd_to_list(cc) + try: + conf.cmd_and_log(cc+['-flags']) + except Exception: + conf.fatal('%r is not a Sun compiler'%cc) + v['CC']=cc + v['CC_NAME']='sun' + conf.get_suncc_version(cc) +@conf +def scc_common_flags(conf): + v=conf.env + v['CC_SRC_F']=[] + v['CC_TGT_F']=['-c','-o'] + if not v['LINK_CC']:v['LINK_CC']=v['CC'] + v['CCLNK_SRC_F']='' + v['CCLNK_TGT_F']=['-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['SONAME_ST']='-Wl,-h,%s' + v['SHLIB_MARKER']='-Bdynamic' + v['STLIB_MARKER']='-Bstatic' + v['cprogram_PATTERN']='%s' + v['CFLAGS_cshlib']=['-Kpic','-DPIC'] + v['LINKFLAGS_cshlib']=['-G'] + v['cshlib_PATTERN']='lib%s.so' + v['LINKFLAGS_cstlib']=['-Bstatic'] + v['cstlib_PATTERN']='lib%s.a' +def configure(conf): + conf.find_scc() + conf.find_ar() + conf.scc_common_flags() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/suncxx.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/suncxx.py new file mode 100644 index 0000000..1736c2d --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/suncxx.py @@ -0,0 +1,55 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os +from waflib import Utils +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_sxx(conf): + v=conf.env + cc=None + if v['CXX']:cc=v['CXX'] + elif'CXX'in conf.environ:cc=conf.environ['CXX'] + if not cc:cc=conf.find_program('CC',var='CXX') + if not cc:cc=conf.find_program('c++',var='CXX') + if not cc:conf.fatal('Could not find a Sun C++ compiler') + cc=conf.cmd_to_list(cc) + try: + conf.cmd_and_log(cc+['-flags']) + except Exception: + conf.fatal('%r is not a Sun compiler'%cc) + v['CXX']=cc + v['CXX_NAME']='sun' + conf.get_suncc_version(cc) +@conf +def sxx_common_flags(conf): + v=conf.env + v['CXX_SRC_F']=[] + v['CXX_TGT_F']=['-c','-o'] + if not v['LINK_CXX']:v['LINK_CXX']=v['CXX'] + v['CXXLNK_SRC_F']=[] + v['CXXLNK_TGT_F']=['-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['SONAME_ST']='-Wl,-h,%s' + v['SHLIB_MARKER']='-Bdynamic' + v['STLIB_MARKER']='-Bstatic' + v['cxxprogram_PATTERN']='%s' + v['CXXFLAGS_cxxshlib']=['-Kpic','-DPIC'] + v['LINKFLAGS_cxxshlib']=['-G'] + v['cxxshlib_PATTERN']='lib%s.so' + v['LINKFLAGS_cxxstlib']=['-Bstatic'] + v['cxxstlib_PATTERN']='lib%s.a' +def configure(conf): + conf.find_sxx() + conf.find_ar() + conf.sxx_common_flags() + conf.cxx_load_tools() + conf.cxx_add_flags() + conf.link_add_flags() diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/tex.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/tex.py new file mode 100644 index 0000000..7b17923 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/tex.py @@ -0,0 +1,253 @@ +#! 
/usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,re +from waflib import Utils,Task,Errors,Logs +from waflib.TaskGen import feature,before_method +re_bibunit=re.compile(r'\\(?P<type>putbib)\[(?P<file>[^\[\]]*)\]',re.M) +def bibunitscan(self): + node=self.inputs[0] + nodes=[] + if not node:return nodes + code=node.read() + for match in re_bibunit.finditer(code): + path=match.group('file') + if path: + for k in['','.bib']: + Logs.debug('tex: trying %s%s'%(path,k)) + fi=node.parent.find_resource(path+k) + if fi: + nodes.append(fi) + else: + Logs.debug('tex: could not find %s'%path) + Logs.debug("tex: found the following bibunit files: %s"%nodes) + return nodes +exts_deps_tex=['','.ltx','.tex','.bib','.pdf','.png','.eps','.ps'] +exts_tex=['.ltx','.tex'] +re_tex=re.compile(r'\\(?P<type>include|bibliography|putbib|includegraphics|input|import|bringin|lstinputlisting)(\[[^\[\]]*\])?{(?P<file>[^{}]*)}',re.M) +g_bibtex_re=re.compile('bibdata',re.M) +class tex(Task.Task): + bibtex_fun,_=Task.compile_fun('${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}',shell=False) + bibtex_fun.__doc__=""" + Execute the program **bibtex** + """ + makeindex_fun,_=Task.compile_fun('${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}',shell=False) + makeindex_fun.__doc__=""" + Execute the program **makeindex** + """ + def exec_command(self,cmd,**kw): + bld=self.generator.bld + try: + if not kw.get('cwd',None): + kw['cwd']=bld.cwd + except AttributeError: + bld.cwd=kw['cwd']=bld.variant_dir + return Utils.subprocess.Popen(cmd,**kw).wait() + def scan_aux(self,node): + nodes=[node] + re_aux=re.compile(r'\\@input{(?P<file>[^{}]*)}',re.M) + def parse_node(node): + code=node.read() + for match in re_aux.finditer(code): + path=match.group('file') + found=node.parent.find_or_declare(path) + if found and found not in nodes: + Logs.debug('tex: found aux node '+found.abspath()) + nodes.append(found) + parse_node(found) + parse_node(node) + return nodes + def scan(self): + node=self.inputs[0] + nodes=[] + names=[] + seen=[] + if not node:return(nodes,names) + def parse_node(node): + if node in seen: + return + seen.append(node) + code=node.read() + global re_tex + for match in re_tex.finditer(code): + for path in match.group('file').split(','): + if path: + add_name=True + found=None + for k in exts_deps_tex: + Logs.debug('tex: trying %s%s'%(path,k)) + found=node.parent.find_resource(path+k) + for tsk in self.generator.tasks: + if not found or found in tsk.outputs: + break + else: + nodes.append(found) + add_name=False + for ext in exts_tex: + if found.name.endswith(ext): + parse_node(found) + break + if add_name: + names.append(path) + parse_node(node) + for x in nodes: + x.parent.get_bld().mkdir() + Logs.debug("tex: found the following : %s and names %s"%(nodes,names)) + return(nodes,names) + def check_status(self,msg,retcode): + if retcode!=0: + raise Errors.WafError("%r command exit status %r"%(msg,retcode)) + def bibfile(self): + for aux_node in self.aux_nodes: + try: + ct=aux_node.read() + except(OSError,IOError): + Logs.error('Error reading %s: %r'%aux_node.abspath()) + continue + if g_bibtex_re.findall(ct): + Logs.warn('calling bibtex') + self.env.env={} + self.env.env.update(os.environ) + self.env.env.update({'BIBINPUTS':self.TEXINPUTS,'BSTINPUTS':self.TEXINPUTS}) + self.env.SRCFILE=aux_node.name[:-4] + self.check_status('error when calling bibtex',self.bibtex_fun()) + def bibunits(self): + try: + bibunits=bibunitscan(self) + except 
OSError: + Logs.error('error bibunitscan') + else: + if bibunits: + fn=['bu'+str(i)for i in xrange(1,len(bibunits)+1)] + if fn: + Logs.warn('calling bibtex on bibunits') + for f in fn: + self.env.env={'BIBINPUTS':self.TEXINPUTS,'BSTINPUTS':self.TEXINPUTS} + self.env.SRCFILE=f + self.check_status('error when calling bibtex',self.bibtex_fun()) + def makeindex(self): + try: + idx_path=self.idx_node.abspath() + os.stat(idx_path) + except OSError: + Logs.warn('index file %s absent, not calling makeindex'%idx_path) + else: + Logs.warn('calling makeindex') + self.env.SRCFILE=self.idx_node.name + self.env.env={} + self.check_status('error when calling makeindex %s'%idx_path,self.makeindex_fun()) + def bibtopic(self): + p=self.inputs[0].parent.get_bld() + if os.path.exists(os.path.join(p.abspath(),'btaux.aux')): + self.aux_nodes+=p.ant_glob('*[0-9].aux') + def run(self): + env=self.env + if not env['PROMPT_LATEX']: + env.append_value('LATEXFLAGS','-interaction=batchmode') + env.append_value('PDFLATEXFLAGS','-interaction=batchmode') + env.append_value('XELATEXFLAGS','-interaction=batchmode') + fun=self.texfun + node=self.inputs[0] + srcfile=node.abspath() + texinputs=self.env.TEXINPUTS or'' + self.TEXINPUTS=node.parent.get_bld().abspath()+os.pathsep+node.parent.get_src().abspath()+os.pathsep+texinputs+os.pathsep + self.cwd=self.inputs[0].parent.get_bld().abspath() + Logs.warn('first pass on %s'%self.__class__.__name__) + self.env.env={} + self.env.env.update(os.environ) + self.env.env.update({'TEXINPUTS':self.TEXINPUTS}) + self.env.SRCFILE=srcfile + self.check_status('error when calling latex',fun()) + self.aux_nodes=self.scan_aux(node.change_ext('.aux')) + self.idx_node=node.change_ext('.idx') + self.bibtopic() + self.bibfile() + self.bibunits() + self.makeindex() + hash='' + for i in range(10): + prev_hash=hash + try: + hashes=[Utils.h_file(x.abspath())for x in self.aux_nodes] + hash=Utils.h_list(hashes) + except(OSError,IOError): + Logs.error('could not read aux.h') + pass + if hash and hash==prev_hash: + break + Logs.warn('calling %s'%self.__class__.__name__) + self.env.env={} + self.env.env.update(os.environ) + self.env.env.update({'TEXINPUTS':self.TEXINPUTS}) + self.env.SRCFILE=srcfile + self.check_status('error when calling %s'%self.__class__.__name__,fun()) +class latex(tex): + texfun,vars=Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}',shell=False) +class pdflatex(tex): + texfun,vars=Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}',shell=False) +class xelatex(tex): + texfun,vars=Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}',shell=False) +class dvips(Task.Task): + run_str='${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}' + color='BLUE' + after=['latex','pdflatex','xelatex'] +class dvipdf(Task.Task): + run_str='${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}' + color='BLUE' + after=['latex','pdflatex','xelatex'] +class pdf2ps(Task.Task): + run_str='${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}' + color='BLUE' + after=['latex','pdflatex','xelatex'] +@feature('tex') +@before_method('process_source') +def apply_tex(self): + if not getattr(self,'type',None)in['latex','pdflatex','xelatex']: + self.type='pdflatex' + tree=self.bld + outs=Utils.to_list(getattr(self,'outs',[])) + self.env['PROMPT_LATEX']=getattr(self,'prompt',1) + deps_lst=[] + if getattr(self,'deps',None): + deps=self.to_list(self.deps) + for filename in deps: + n=self.path.find_resource(filename) + if not n: + self.bld.fatal('Could not find %r for %r'%(filename,self)) + if not n in deps_lst: + deps_lst.append(n) + for node in 
self.to_nodes(self.source): + if self.type=='latex': + task=self.create_task('latex',node,node.change_ext('.dvi')) + elif self.type=='pdflatex': + task=self.create_task('pdflatex',node,node.change_ext('.pdf')) + elif self.type=='xelatex': + task=self.create_task('xelatex',node,node.change_ext('.pdf')) + task.env=self.env + if deps_lst: + for n in deps_lst: + if not n in task.dep_nodes: + task.dep_nodes.append(n) + v=dict(os.environ) + p=node.parent.abspath()+os.pathsep+self.path.abspath()+os.pathsep+self.path.get_bld().abspath()+os.pathsep+v.get('TEXINPUTS','')+os.pathsep + v['TEXINPUTS']=p + if self.type=='latex': + if'ps'in outs: + tsk=self.create_task('dvips',task.outputs,node.change_ext('.ps')) + tsk.env.env=dict(v) + if'pdf'in outs: + tsk=self.create_task('dvipdf',task.outputs,node.change_ext('.pdf')) + tsk.env.env=dict(v) + elif self.type=='pdflatex': + if'ps'in outs: + self.create_task('pdf2ps',task.outputs,node.change_ext('.ps')) + self.source=[] +def configure(self): + v=self.env + for p in'tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split(): + try: + self.find_program(p,var=p.upper()) + except self.errors.ConfigurationError: + pass + v['DVIPSFLAGS']='-Ppdf' diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/vala.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/vala.py new file mode 100644 index 0000000..96248c1 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/vala.py @@ -0,0 +1,201 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os.path,shutil,re +from waflib import Context,Task,Utils,Logs,Options,Errors +from waflib.TaskGen import extension,taskgen_method +from waflib.Configure import conf +class valac(Task.Task): + vars=["VALAC","VALAC_VERSION","VALAFLAGS"] + ext_out=['.h'] + def run(self): + cmd=[self.env['VALAC']]+self.env['VALAFLAGS'] + cmd.extend([a.abspath()for a in self.inputs]) + ret=self.exec_command(cmd,cwd=self.outputs[0].parent.abspath()) + if ret: + return ret + for x in self.outputs: + if id(x.parent)!=id(self.outputs[0].parent): + shutil.move(self.outputs[0].parent.abspath()+os.sep+x.name,x.abspath()) + if self.generator.dump_deps_node: + self.generator.dump_deps_node.write('\n'.join(self.generator.packages)) + return ret +valac=Task.update_outputs(valac) +@taskgen_method +def init_vala_task(self): + self.profile=getattr(self,'profile','gobject') + if self.profile=='gobject': + self.uselib=Utils.to_list(getattr(self,'uselib',[])) + if not'GOBJECT'in self.uselib: + self.uselib.append('GOBJECT') + def addflags(flags): + self.env.append_value('VALAFLAGS',flags) + if self.profile: + addflags('--profile=%s'%self.profile) + if hasattr(self,'threading'): + if self.profile=='gobject': + if not'GTHREAD'in self.uselib: + self.uselib.append('GTHREAD') + else: + Logs.warn("Profile %s means no threading support"%self.profile) + self.threading=False + if self.threading: + addflags('--threading') + valatask=self.valatask + self.is_lib='cprogram'not in self.features + if self.is_lib: + addflags('--library=%s'%self.target) + h_node=self.path.find_or_declare('%s.h'%self.target) + valatask.outputs.append(h_node) + addflags('--header=%s'%h_node.name) + valatask.outputs.append(self.path.find_or_declare('%s.vapi'%self.target)) + if getattr(self,'gir',None): + gir_node=self.path.find_or_declare('%s.gir'%self.gir) + addflags('--gir=%s'%gir_node.name) + valatask.outputs.append(gir_node) + 
self.vala_target_glib=getattr(self,'vala_target_glib',getattr(Options.options,'vala_target_glib',None)) + if self.vala_target_glib: + addflags('--target-glib=%s'%self.vala_target_glib) + addflags(['--define=%s'%x for x in getattr(self,'vala_defines',[])]) + packages_private=Utils.to_list(getattr(self,'packages_private',[])) + addflags(['--pkg=%s'%x for x in packages_private]) + def _get_api_version(): + api_version='1.0' + if hasattr(Context.g_module,'API_VERSION'): + version=Context.g_module.API_VERSION.split(".") + if version[0]=="0": + api_version="0."+version[1] + else: + api_version=version[0]+".0" + return api_version + self.includes=Utils.to_list(getattr(self,'includes',[])) + self.uselib=self.to_list(getattr(self,'uselib',[])) + valatask.install_path=getattr(self,'install_path','') + valatask.vapi_path=getattr(self,'vapi_path','${DATAROOTDIR}/vala/vapi') + valatask.pkg_name=getattr(self,'pkg_name',self.env['PACKAGE']) + valatask.header_path=getattr(self,'header_path','${INCLUDEDIR}/%s-%s'%(valatask.pkg_name,_get_api_version())) + valatask.install_binding=getattr(self,'install_binding',True) + self.packages=packages=Utils.to_list(getattr(self,'packages',[])) + self.vapi_dirs=vapi_dirs=Utils.to_list(getattr(self,'vapi_dirs',[])) + includes=[] + if hasattr(self,'use'): + local_packages=Utils.to_list(self.use)[:] + seen=[] + while len(local_packages)>0: + package=local_packages.pop() + if package in seen: + continue + seen.append(package) + try: + package_obj=self.bld.get_tgen_by_name(package) + except Errors.WafError: + continue + package_name=package_obj.target + package_node=package_obj.path + package_dir=package_node.path_from(self.path) + for task in package_obj.tasks: + for output in task.outputs: + if output.name==package_name+".vapi": + valatask.set_run_after(task) + if package_name not in packages: + packages.append(package_name) + if package_dir not in vapi_dirs: + vapi_dirs.append(package_dir) + if package_dir not in includes: + includes.append(package_dir) + if hasattr(package_obj,'use'): + lst=self.to_list(package_obj.use) + lst.reverse() + local_packages=[pkg for pkg in lst if pkg not in seen]+local_packages + addflags(['--pkg=%s'%p for p in packages]) + for vapi_dir in vapi_dirs: + v_node=self.path.find_dir(vapi_dir) + if not v_node: + Logs.warn('Unable to locate Vala API directory: %r'%vapi_dir) + else: + addflags('--vapidir=%s'%v_node.abspath()) + addflags('--vapidir=%s'%v_node.get_bld().abspath()) + self.dump_deps_node=None + if self.is_lib and self.packages: + self.dump_deps_node=self.path.find_or_declare('%s.deps'%self.target) + valatask.outputs.append(self.dump_deps_node) + self.includes.append(self.bld.srcnode.abspath()) + self.includes.append(self.bld.bldnode.abspath()) + for include in includes: + try: + self.includes.append(self.path.find_dir(include).abspath()) + self.includes.append(self.path.find_dir(include).get_bld().abspath()) + except AttributeError: + Logs.warn("Unable to locate include directory: '%s'"%include) + if self.is_lib and valatask.install_binding: + headers_list=[o for o in valatask.outputs if o.suffix()==".h"] + try: + self.install_vheader.source=headers_list + except AttributeError: + self.install_vheader=self.bld.install_files(valatask.header_path,headers_list,self.env) + vapi_list=[o for o in valatask.outputs if(o.suffix()in(".vapi",".deps"))] + try: + self.install_vapi.source=vapi_list + except AttributeError: + self.install_vapi=self.bld.install_files(valatask.vapi_path,vapi_list,self.env) + gir_list=[o for o in valatask.outputs if 
o.suffix()=='.gir'] + try: + self.install_gir.source=gir_list + except AttributeError: + self.install_gir=self.bld.install_files(getattr(self,'gir_path','${DATAROOTDIR}/gir-1.0'),gir_list,self.env) +@extension('.vala','.gs') +def vala_file(self,node): + try: + valatask=self.valatask + except AttributeError: + valatask=self.valatask=self.create_task('valac') + self.init_vala_task() + valatask.inputs.append(node) + c_node=node.change_ext('.c') + valatask.outputs.append(c_node) + self.source.append(c_node) +@conf +def find_valac(self,valac_name,min_version): + valac=self.find_program(valac_name,var='VALAC') + try: + output=self.cmd_and_log(valac+' --version') + except Exception: + valac_version=None + else: + ver=re.search(r'\d+.\d+.\d+',output).group(0).split('.') + valac_version=tuple([int(x)for x in ver]) + self.msg('Checking for %s version >= %r'%(valac_name,min_version),valac_version,valac_version and valac_version>=min_version) + if valac and valac_version<min_version: + self.fatal("%s version %r is too old, need >= %r"%(valac_name,valac_version,min_version)) + self.env['VALAC_VERSION']=valac_version + return valac +@conf +def check_vala(self,min_version=(0,8,0),branch=None): + if not branch: + branch=min_version[:2] + try: + find_valac(self,'valac-%d.%d'%(branch[0],branch[1]),min_version) + except self.errors.ConfigurationError: + find_valac(self,'valac',min_version) +@conf +def check_vala_deps(self): + if not self.env['HAVE_GOBJECT']: + pkg_args={'package':'gobject-2.0','uselib_store':'GOBJECT','args':'--cflags --libs'} + if getattr(Options.options,'vala_target_glib',None): + pkg_args['atleast_version']=Options.options.vala_target_glib + self.check_cfg(**pkg_args) + if not self.env['HAVE_GTHREAD']: + pkg_args={'package':'gthread-2.0','uselib_store':'GTHREAD','args':'--cflags --libs'} + if getattr(Options.options,'vala_target_glib',None): + pkg_args['atleast_version']=Options.options.vala_target_glib + self.check_cfg(**pkg_args) +def configure(self): + self.load('gnu_dirs') + self.check_vala_deps() + self.check_vala() + self.env.VALAFLAGS=['-C','--quiet'] +def options(opt): + opt.load('gnu_dirs') + valaopts=opt.add_option_group('Vala Compiler Options') + valaopts.add_option('--vala-target-glib',default=None,dest='vala_target_glib',metavar='MAJOR.MINOR',help='Target version of glib for Vala GObject code generation') diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/waf_unit_test.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/waf_unit_test.py new file mode 100644 index 0000000..3363172 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/waf_unit_test.py @@ -0,0 +1,97 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys +from waflib.TaskGen import feature,after_method +from waflib import Utils,Task,Logs,Options +testlock=Utils.threading.Lock() +@feature('test') +@after_method('apply_link') +def make_test(self): + if getattr(self,'link_task',None): + self.create_task('utest',self.link_task.outputs) +class utest(Task.Task): + color='PINK' + after=['vnum','inst'] + vars=[] + def runnable_status(self): + if getattr(Options.options,'no_tests',False): + return Task.SKIP_ME + ret=super(utest,self).runnable_status() + if ret==Task.SKIP_ME: + if getattr(Options.options,'all_tests',False): + return Task.RUN_ME + return ret + def run(self): + filename=self.inputs[0].abspath() + self.ut_exec=getattr(self.generator,'ut_exec',[filename]) + if getattr(self.generator,'ut_fun',None): + self.generator.ut_fun(self) + try: + fu=getattr(self.generator.bld,'all_test_paths') + except AttributeError: + fu=os.environ.copy() + lst=[] + for g in self.generator.bld.groups: + for tg in g: + if getattr(tg,'link_task',None): + s=tg.link_task.outputs[0].parent.abspath() + if s not in lst: + lst.append(s) + def add_path(dct,path,var): + dct[var]=os.pathsep.join(Utils.to_list(path)+[os.environ.get(var,'')]) + if Utils.is_win32: + add_path(fu,lst,'PATH') + elif Utils.unversioned_sys_platform()=='darwin': + add_path(fu,lst,'DYLD_LIBRARY_PATH') + add_path(fu,lst,'LD_LIBRARY_PATH') + else: + add_path(fu,lst,'LD_LIBRARY_PATH') + self.generator.bld.all_test_paths=fu + cwd=getattr(self.generator,'ut_cwd','')or self.inputs[0].parent.abspath() + testcmd=getattr(Options.options,'testcmd',False) + if testcmd: + self.ut_exec=(testcmd%self.ut_exec[0]).split(' ') + proc=Utils.subprocess.Popen(self.ut_exec,cwd=cwd,env=fu,stderr=Utils.subprocess.PIPE,stdout=Utils.subprocess.PIPE) + (stdout,stderr)=proc.communicate() + tup=(filename,proc.returncode,stdout,stderr) + self.generator.utest_result=tup + testlock.acquire() + try: + bld=self.generator.bld + Logs.debug("ut: %r",tup) + try: + bld.utest_results.append(tup) + except AttributeError: + bld.utest_results=[tup] + finally: + testlock.release() +def summary(bld): + lst=getattr(bld,'utest_results',[]) + if lst: + Logs.pprint('CYAN','execution summary') + total=len(lst) + tfail=len([x for x in lst if x[1]]) + Logs.pprint('CYAN',' tests that pass %d/%d'%(total-tfail,total)) + for(f,code,out,err)in lst: + if not code: + Logs.pprint('CYAN',' %s'%f) + Logs.pprint('CYAN',' tests that fail %d/%d'%(tfail,total)) + for(f,code,out,err)in lst: + if code: + Logs.pprint('CYAN',' %s'%f) +def set_exit_code(bld): + lst=getattr(bld,'utest_results',[]) + for(f,code,out,err)in lst: + if code: + msg=[] + if out: + msg.append('stdout:%s%s'%(os.linesep,out.decode('utf-8'))) + if err: + msg.append('stderr:%s%s'%(os.linesep,err.decode('utf-8'))) + bld.fatal(os.linesep.join(msg)) +def options(opt): + opt.add_option('--notests',action='store_true',default=False,help='Exec no unit tests',dest='no_tests') + opt.add_option('--alltests',action='store_true',default=False,help='Exec all unit tests',dest='all_tests') + opt.add_option('--testcmd',action='store',default=False,help='Run the unit tests using the test-cmd string'' example "--test-cmd="valgrind --error-exitcode=1'' %s" to run under valgrind',dest='testcmd') diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/winres.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/winres.py new file mode 100644 index 0000000..88904af --- /dev/null +++ 
b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/winres.py @@ -0,0 +1,85 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import re,traceback +from waflib import Task,Logs,Utils +from waflib.TaskGen import extension +from waflib.Tools import c_preproc +@extension('.rc') +def rc_file(self,node): + obj_ext='.rc.o' + if self.env['WINRC_TGT_F']=='/fo': + obj_ext='.res' + rctask=self.create_task('winrc',node,node.change_ext(obj_ext)) + try: + self.compiled_tasks.append(rctask) + except AttributeError: + self.compiled_tasks=[rctask] +re_lines=re.compile('(?:^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*?)\s*$)|''(?:^\w+[ \t]*(ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)[ \t]*(.*?)\s*$)',re.IGNORECASE|re.MULTILINE) +class rc_parser(c_preproc.c_parser): + def filter_comments(self,filepath): + code=Utils.readf(filepath) + if c_preproc.use_trigraphs: + for(a,b)in c_preproc.trig_def:code=code.split(a).join(b) + code=c_preproc.re_nl.sub('',code) + code=c_preproc.re_cpp.sub(c_preproc.repl,code) + ret=[] + for m in re.finditer(re_lines,code): + if m.group(2): + ret.append((m.group(2),m.group(3))) + else: + ret.append(('include',m.group(5))) + return ret + def addlines(self,node): + self.currentnode_stack.append(node.parent) + filepath=node.abspath() + self.count_files+=1 + if self.count_files>c_preproc.recursion_limit: + raise c_preproc.PreprocError("recursion limit exceeded") + pc=self.parse_cache + Logs.debug('preproc: reading file %r',filepath) + try: + lns=pc[filepath] + except KeyError: + pass + else: + self.lines.extend(lns) + return + try: + lines=self.filter_comments(filepath) + lines.append((c_preproc.POPFILE,'')) + lines.reverse() + pc[filepath]=lines + self.lines.extend(lines) + except IOError: + raise c_preproc.PreprocError("could not read the file %s"%filepath) + except Exception: + if Logs.verbose>0: + Logs.error("parsing %s failed"%filepath) + traceback.print_exc() +class winrc(Task.Task): + run_str='${WINRC} ${WINRCFLAGS} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}' + color='BLUE' + def scan(self): + tmp=rc_parser(self.generator.includes_nodes) + tmp.start(self.inputs[0],self.env) + nodes=tmp.nodes + names=tmp.names + if Logs.verbose: + Logs.debug('deps: deps for %s: %r; unresolved %r'%(str(self),nodes,names)) + return(nodes,names) +def configure(conf): + v=conf.env + v['WINRC_TGT_F']='-o' + v['WINRC_SRC_F']='-i' + if not conf.env.WINRC: + if v.CC_NAME=='msvc': + conf.find_program('RC',var='WINRC',path_list=v['PATH']) + v['WINRC_TGT_F']='/fo' + v['WINRC_SRC_F']='' + else: + conf.find_program('windres',var='WINRC',path_list=v['PATH']) + if not conf.env.WINRC: + conf.fatal('winrc was not found!') + v['WINRCFLAGS']=[] diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/xlc.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/xlc.py new file mode 100644 index 0000000..fbf0fcf --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/xlc.py @@ -0,0 +1,45 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_xlc(conf): + cc=conf.find_program(['xlc_r','xlc'],var='CC') + cc=conf.cmd_to_list(cc) + conf.get_xlc_version(cc) + conf.env.CC_NAME='xlc' + conf.env.CC=cc +@conf +def xlc_common_flags(conf): + v=conf.env + v['CC_SRC_F']=[] + v['CC_TGT_F']=['-c','-o'] + if not v['LINK_CC']:v['LINK_CC']=v['CC'] + v['CCLNK_SRC_F']=[] + v['CCLNK_TGT_F']=['-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['RPATH_ST']='-Wl,-rpath,%s' + v['SONAME_ST']=[] + v['SHLIB_MARKER']=[] + v['STLIB_MARKER']=[] + v['LINKFLAGS_cprogram']=['-Wl,-brtl'] + v['cprogram_PATTERN']='%s' + v['CFLAGS_cshlib']=['-fPIC'] + v['LINKFLAGS_cshlib']=['-G','-Wl,-brtl,-bexpfull'] + v['cshlib_PATTERN']='lib%s.so' + v['LINKFLAGS_cstlib']=[] + v['cstlib_PATTERN']='lib%s.a' +def configure(conf): + conf.find_xlc() + conf.find_ar() + conf.xlc_common_flags() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/xlcxx.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/xlcxx.py new file mode 100644 index 0000000..b7efb23 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/xlcxx.py @@ -0,0 +1,45 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_xlcxx(conf): + cxx=conf.find_program(['xlc++_r','xlc++'],var='CXX') + cxx=conf.cmd_to_list(cxx) + conf.get_xlc_version(cxx) + conf.env.CXX_NAME='xlc++' + conf.env.CXX=cxx +@conf +def xlcxx_common_flags(conf): + v=conf.env + v['CXX_SRC_F']=[] + v['CXX_TGT_F']=['-c','-o'] + if not v['LINK_CXX']:v['LINK_CXX']=v['CXX'] + v['CXXLNK_SRC_F']=[] + v['CXXLNK_TGT_F']=['-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['RPATH_ST']='-Wl,-rpath,%s' + v['SONAME_ST']=[] + v['SHLIB_MARKER']=[] + v['STLIB_MARKER']=[] + v['LINKFLAGS_cxxprogram']=['-Wl,-brtl'] + v['cxxprogram_PATTERN']='%s' + v['CXXFLAGS_cxxshlib']=['-fPIC'] + v['LINKFLAGS_cxxshlib']=['-G','-Wl,-brtl,-bexpfull'] + v['cxxshlib_PATTERN']='lib%s.so' + v['LINKFLAGS_cxxstlib']=[] + v['cxxstlib_PATTERN']='lib%s.a' +def configure(conf): + conf.find_xlcxx() + conf.find_ar() + conf.xlcxx_common_flags() + conf.cxx_load_tools() + conf.cxx_add_flags() + conf.link_add_flags() diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Utils.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Utils.py new file mode 100644 index 0000000..92cfd81 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Utils.py @@ -0,0 +1,412 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys,errno,traceback,inspect,re,shutil,datetime,gc +import subprocess +try: + from collections import deque +except ImportError: + class deque(list): + def popleft(self): + return self.pop(0) +try: + import _winreg as winreg +except ImportError: + try: + import winreg + except ImportError: + winreg=None +from waflib import Errors +try: + from collections import UserDict +except ImportError: + from UserDict import UserDict +try: + from hashlib import md5 +except ImportError: + try: + from md5 import md5 + except ImportError: + pass +try: + import threading +except ImportError: + class threading(object): + pass + class Lock(object): + def acquire(self): + pass + def release(self): + pass + threading.Lock=threading.Thread=Lock +else: + run_old=threading.Thread.run + def run(*args,**kwargs): + try: + run_old(*args,**kwargs) + except(KeyboardInterrupt,SystemExit): + raise + except Exception: + sys.excepthook(*sys.exc_info()) + threading.Thread.run=run +SIG_NIL='iluvcuteoverload' +O644=420 +O755=493 +rot_chr=['\\','|','/','-'] +rot_idx=0 +try: + from collections import defaultdict +except ImportError: + class defaultdict(dict): + def __init__(self,default_factory): + super(defaultdict,self).__init__() + self.default_factory=default_factory + def __getitem__(self,key): + try: + return super(defaultdict,self).__getitem__(key) + except KeyError: + value=self.default_factory() + self[key]=value + return value +is_win32=sys.platform in('win32','cli') +indicator='\x1b[K%s%s%s\r' +if is_win32 and'NOCOLOR'in os.environ: + indicator='%s%s%s\r' +def readf(fname,m='r',encoding='ISO8859-1'): + if sys.hexversion>0x3000000 and not'b'in m: + m+='b' + f=open(fname,m) + try: + txt=f.read() + finally: + f.close() + txt=txt.decode(encoding) + else: + f=open(fname,m) + try: + txt=f.read() + finally: + f.close() + return txt +def writef(fname,data,m='w',encoding='ISO8859-1'): + if sys.hexversion>0x3000000 and not'b'in m: + data=data.encode(encoding) + m+='b' + f=open(fname,m) + try: + f.write(data) + finally: + f.close() +def h_file(fname): + f=open(fname,'rb') + m=md5() + try: + while fname: + fname=f.read(200000) + m.update(fname) + finally: + f.close() + return m.digest() +if hasattr(os,'O_NOINHERIT')and sys.hexversion<0x3040000: + def readf_win32(f,m='r',encoding='ISO8859-1'): + flags=os.O_NOINHERIT|os.O_RDONLY + if'b'in m: + flags|=os.O_BINARY + if'+'in m: + flags|=os.O_RDWR + try: + fd=os.open(f,flags) + except OSError: + raise IOError('Cannot read from %r'%f) + if sys.hexversion>0x3000000 and not'b'in m: + m+='b' + f=os.fdopen(fd,m) + try: + txt=f.read() + finally: + f.close() + txt=txt.decode(encoding) + else: + f=os.fdopen(fd,m) + try: + txt=f.read() + finally: + f.close() + return txt + def writef_win32(f,data,m='w',encoding='ISO8859-1'): + if sys.hexversion>0x3000000 and not'b'in m: + data=data.encode(encoding) + m+='b' + flags=os.O_CREAT|os.O_TRUNC|os.O_WRONLY|os.O_NOINHERIT + if'b'in m: + flags|=os.O_BINARY + if'+'in m: + flags|=os.O_RDWR + try: + fd=os.open(f,flags) + except OSError: + raise IOError('Cannot write to %r'%f) + f=os.fdopen(fd,m) + try: + f.write(data) + finally: + f.close() + def h_file_win32(fname): + try: + fd=os.open(fname,os.O_BINARY|os.O_RDONLY|os.O_NOINHERIT) + except OSError: + raise IOError('Cannot read from %r'%fname) + f=os.fdopen(fd,'rb') + m=md5() + try: + while fname: + fname=f.read(200000) + m.update(fname) + finally: + f.close() + return m.digest() + readf_old=readf + 
writef_old=writef + h_file_old=h_file + readf=readf_win32 + writef=writef_win32 + h_file=h_file_win32 +try: + x=''.encode('hex') +except LookupError: + import binascii + def to_hex(s): + ret=binascii.hexlify(s) + if not isinstance(ret,str): + ret=ret.decode('utf-8') + return ret +else: + def to_hex(s): + return s.encode('hex') +to_hex.__doc__=""" +Return the hexadecimal representation of a string + +:param s: string to convert +:type s: string +""" +listdir=os.listdir +if is_win32: + def listdir_win32(s): + if not s: + try: + import ctypes + except ImportError: + return[x+':\\'for x in list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')] + else: + dlen=4 + maxdrives=26 + buf=ctypes.create_string_buffer(maxdrives*dlen) + ndrives=ctypes.windll.kernel32.GetLogicalDriveStringsA(maxdrives*dlen,ctypes.byref(buf)) + return[str(buf.raw[4*i:4*i+2].decode('ascii'))for i in range(int(ndrives/dlen))] + if len(s)==2 and s[1]==":": + s+=os.sep + if not os.path.isdir(s): + e=OSError('%s is not a directory'%s) + e.errno=errno.ENOENT + raise e + return os.listdir(s) + listdir=listdir_win32 +def num2ver(ver): + if isinstance(ver,str): + ver=tuple(ver.split('.')) + if isinstance(ver,tuple): + ret=0 + for i in range(4): + if i<len(ver): + ret+=256**(3-i)*int(ver[i]) + return ret + return ver +def ex_stack(): + exc_type,exc_value,tb=sys.exc_info() + exc_lines=traceback.format_exception(exc_type,exc_value,tb) + return''.join(exc_lines) +def to_list(sth): + if isinstance(sth,str): + return sth.split() + else: + return sth +re_nl=re.compile('\r*\n',re.M) +def str_to_dict(txt): + tbl={} + lines=re_nl.split(txt) + for x in lines: + x=x.strip() + if not x or x.startswith('#')or x.find('=')<0: + continue + tmp=x.split('=') + tbl[tmp[0].strip()]='='.join(tmp[1:]).strip() + return tbl +def split_path(path): + return path.split('/') +def split_path_cygwin(path): + if path.startswith('//'): + ret=path.split('/')[2:] + ret[0]='/'+ret[0] + return ret + return path.split('/') +re_sp=re.compile('[/\\\\]') +def split_path_win32(path): + if path.startswith('\\\\'): + ret=re.split(re_sp,path)[2:] + ret[0]='\\'+ret[0] + return ret + return re.split(re_sp,path) +if sys.platform=='cygwin': + split_path=split_path_cygwin +elif is_win32: + split_path=split_path_win32 +split_path.__doc__=""" +Split a path by / or \\. 
This function is not like os.path.split + +:type path: string +:param path: path to split +:return: list of strings +""" +def check_dir(path): + if not os.path.isdir(path): + try: + os.makedirs(path) + except OSError ,e: + if not os.path.isdir(path): + raise Errors.WafError('Cannot create the folder %r'%path,ex=e) +def def_attrs(cls,**kw): + for k,v in kw.items(): + if not hasattr(cls,k): + setattr(cls,k,v) +def quote_define_name(s): + fu=re.compile("[^a-zA-Z0-9]").sub("_",s) + fu=fu.upper() + return fu +def h_list(lst): + m=md5() + m.update(str(lst)) + return m.digest() +def h_fun(fun): + try: + return fun.code + except AttributeError: + try: + h=inspect.getsource(fun) + except IOError: + h="nocode" + try: + fun.code=h + except AttributeError: + pass + return h +reg_subst=re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}") +def subst_vars(expr,params): + def repl_var(m): + if m.group(1): + return'\\' + if m.group(2): + return'$' + try: + return params.get_flat(m.group(3)) + except AttributeError: + return params[m.group(3)] + return reg_subst.sub(repl_var,expr) +def destos_to_binfmt(key): + if key=='darwin': + return'mac-o' + elif key in('win32','cygwin','uwin','msys'): + return'pe' + return'elf' +def unversioned_sys_platform(): + s=sys.platform + if s=='java': + from java.lang import System + s=System.getProperty('os.name') + if s=='Mac OS X': + return'darwin' + elif s.startswith('Windows '): + return'win32' + elif s=='OS/2': + return'os2' + elif s=='HP-UX': + return'hpux' + elif s in('SunOS','Solaris'): + return'sunos' + else:s=s.lower() + if s=='powerpc': + return'darwin' + if s=='win32'or s.endswith('os2')and s!='sunos2':return s + return re.split('\d+$',s)[0] +def nada(*k,**kw): + pass +class Timer(object): + def __init__(self): + self.start_time=datetime.datetime.utcnow() + def __str__(self): + delta=datetime.datetime.utcnow()-self.start_time + days=int(delta.days) + hours=delta.seconds//3600 + minutes=(delta.seconds-hours*3600)//60 + seconds=delta.seconds-hours*3600-minutes*60+float(delta.microseconds)/1000/1000 + result='' + if days: + result+='%dd'%days + if days or hours: + result+='%dh'%hours + if days or hours or minutes: + result+='%dm'%minutes + return'%s%.3fs'%(result,seconds) +if is_win32: + old=shutil.copy2 + def copy2(src,dst): + old(src,dst) + shutil.copystat(src,dst) + setattr(shutil,'copy2',copy2) +if os.name=='java': + try: + gc.disable() + gc.enable() + except NotImplementedError: + gc.disable=gc.enable +def read_la_file(path): + sp=re.compile(r'^([^=]+)=\'(.*)\'$') + dc={} + for line in readf(path).splitlines(): + try: + _,left,right,_=sp.split(line.strip()) + dc[left]=right + except ValueError: + pass + return dc +def nogc(fun): + def f(*k,**kw): + try: + gc.disable() + ret=fun(*k,**kw) + finally: + gc.enable() + return ret + f.__doc__=fun.__doc__ + return f +def run_once(fun): + cache={} + def wrap(k): + try: + return cache[k] + except KeyError: + ret=fun(k) + cache[k]=ret + return ret + wrap.__cache__=cache + return wrap +def get_registry_app_path(key,filename): + if not winreg: + return None + try: + result=winreg.QueryValue(key,"Software\\Microsoft\\Windows\\CurrentVersion\\App Paths\\%s.exe"%filename[0]) + except WindowsError: + pass + else: + if os.path.isfile(result): + return result diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Utils.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Utils.pyc Binary files differnew file mode 100644 index 0000000..0dd8845 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Utils.pyc 
diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__init__.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__init__.py new file mode 100644 index 0000000..efeed79 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__init__.py @@ -0,0 +1,4 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__init__.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__init__.pyc Binary files differnew file mode 100644 index 0000000..5db2af6 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__init__.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/ansiterm.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/ansiterm.py new file mode 100644 index 0000000..a959e12 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/ansiterm.py @@ -0,0 +1,191 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import sys,os +try: + if not(sys.stderr.isatty()and sys.stdout.isatty()): + raise ValueError('not a tty') + from ctypes import Structure,windll,c_short,c_ushort,c_ulong,c_int,byref,POINTER,c_long,c_wchar + class COORD(Structure): + _fields_=[("X",c_short),("Y",c_short)] + class SMALL_RECT(Structure): + _fields_=[("Left",c_short),("Top",c_short),("Right",c_short),("Bottom",c_short)] + class CONSOLE_SCREEN_BUFFER_INFO(Structure): + _fields_=[("Size",COORD),("CursorPosition",COORD),("Attributes",c_short),("Window",SMALL_RECT),("MaximumWindowSize",COORD)] + class CONSOLE_CURSOR_INFO(Structure): + _fields_=[('dwSize',c_ulong),('bVisible',c_int)] + windll.kernel32.GetStdHandle.argtypes=[c_ulong] + windll.kernel32.GetStdHandle.restype=c_ulong + windll.kernel32.GetConsoleScreenBufferInfo.argtypes=[c_ulong,POINTER(CONSOLE_SCREEN_BUFFER_INFO)] + windll.kernel32.GetConsoleScreenBufferInfo.restype=c_long + windll.kernel32.SetConsoleTextAttribute.argtypes=[c_ulong,c_ushort] + windll.kernel32.SetConsoleTextAttribute.restype=c_long + windll.kernel32.FillConsoleOutputCharacterW.argtypes=[c_ulong,c_wchar,c_ulong,POINTER(COORD),POINTER(c_ulong)] + windll.kernel32.FillConsoleOutputCharacterW.restype=c_long + windll.kernel32.FillConsoleOutputAttribute.argtypes=[c_ulong,c_ushort,c_ulong,POINTER(COORD),POINTER(c_ulong)] + windll.kernel32.FillConsoleOutputAttribute.restype=c_long + windll.kernel32.SetConsoleCursorPosition.argtypes=[c_ulong,POINTER(COORD)] + windll.kernel32.SetConsoleCursorPosition.restype=c_long + windll.kernel32.SetConsoleCursorInfo.argtypes=[c_ulong,POINTER(CONSOLE_CURSOR_INFO)] + windll.kernel32.SetConsoleCursorInfo.restype=c_long + sbinfo=CONSOLE_SCREEN_BUFFER_INFO() + csinfo=CONSOLE_CURSOR_INFO() + hconsole=windll.kernel32.GetStdHandle(-11) + windll.kernel32.GetConsoleScreenBufferInfo(hconsole,byref(sbinfo)) + if sbinfo.Size.X<9 or sbinfo.Size.Y<9:raise ValueError('small console') + windll.kernel32.GetConsoleCursorInfo(hconsole,byref(csinfo)) +except Exception: + pass +else: + import re,threading + is_vista=getattr(sys,"getwindowsversion",None)and sys.getwindowsversion()[0]>=6 + try: + _type=unicode + except NameError: + _type=str + to_int=lambda number,default:number and int(number)or default + wlock=threading.Lock() + STD_OUTPUT_HANDLE=-11 + STD_ERROR_HANDLE=-12 + class AnsiTerm(object): + def __init__(self): + 
self.encoding=sys.stdout.encoding + self.hconsole=windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE) + self.cursor_history=[] + self.orig_sbinfo=CONSOLE_SCREEN_BUFFER_INFO() + self.orig_csinfo=CONSOLE_CURSOR_INFO() + windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(self.orig_sbinfo)) + windll.kernel32.GetConsoleCursorInfo(hconsole,byref(self.orig_csinfo)) + def screen_buffer_info(self): + sbinfo=CONSOLE_SCREEN_BUFFER_INFO() + windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(sbinfo)) + return sbinfo + def clear_line(self,param): + mode=param and int(param)or 0 + sbinfo=self.screen_buffer_info() + if mode==1: + line_start=COORD(0,sbinfo.CursorPosition.Y) + line_length=sbinfo.Size.X + elif mode==2: + line_start=COORD(sbinfo.CursorPosition.X,sbinfo.CursorPosition.Y) + line_length=sbinfo.Size.X-sbinfo.CursorPosition.X + else: + line_start=sbinfo.CursorPosition + line_length=sbinfo.Size.X-sbinfo.CursorPosition.X + chars_written=c_ulong() + windll.kernel32.FillConsoleOutputCharacterW(self.hconsole,c_wchar(' '),line_length,line_start,byref(chars_written)) + windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,line_length,line_start,byref(chars_written)) + def clear_screen(self,param): + mode=to_int(param,0) + sbinfo=self.screen_buffer_info() + if mode==1: + clear_start=COORD(0,0) + clear_length=sbinfo.CursorPosition.X*sbinfo.CursorPosition.Y + elif mode==2: + clear_start=COORD(0,0) + clear_length=sbinfo.Size.X*sbinfo.Size.Y + windll.kernel32.SetConsoleCursorPosition(self.hconsole,clear_start) + else: + clear_start=sbinfo.CursorPosition + clear_length=((sbinfo.Size.X-sbinfo.CursorPosition.X)+sbinfo.Size.X*(sbinfo.Size.Y-sbinfo.CursorPosition.Y)) + chars_written=c_ulong() + windll.kernel32.FillConsoleOutputCharacterW(self.hconsole,c_wchar(' '),clear_length,clear_start,byref(chars_written)) + windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,clear_length,clear_start,byref(chars_written)) + def push_cursor(self,param): + sbinfo=self.screen_buffer_info() + self.cursor_history.append(sbinfo.CursorPosition) + def pop_cursor(self,param): + if self.cursor_history: + old_pos=self.cursor_history.pop() + windll.kernel32.SetConsoleCursorPosition(self.hconsole,old_pos) + def set_cursor(self,param): + y,sep,x=param.partition(';') + x=to_int(x,1)-1 + y=to_int(y,1)-1 + sbinfo=self.screen_buffer_info() + new_pos=COORD(min(max(0,x),sbinfo.Size.X),min(max(0,y),sbinfo.Size.Y)) + windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos) + def set_column(self,param): + x=to_int(param,1)-1 + sbinfo=self.screen_buffer_info() + new_pos=COORD(min(max(0,x),sbinfo.Size.X),sbinfo.CursorPosition.Y) + windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos) + def move_cursor(self,x_offset=0,y_offset=0): + sbinfo=self.screen_buffer_info() + new_pos=COORD(min(max(0,sbinfo.CursorPosition.X+x_offset),sbinfo.Size.X),min(max(0,sbinfo.CursorPosition.Y+y_offset),sbinfo.Size.Y)) + windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos) + def move_up(self,param): + self.move_cursor(y_offset=-to_int(param,1)) + def move_down(self,param): + self.move_cursor(y_offset=to_int(param,1)) + def move_left(self,param): + self.move_cursor(x_offset=-to_int(param,1)) + def move_right(self,param): + self.move_cursor(x_offset=to_int(param,1)) + def next_line(self,param): + sbinfo=self.screen_buffer_info() + self.move_cursor(x_offset=-sbinfo.CursorPosition.X,y_offset=to_int(param,1)) + def prev_line(self,param): + sbinfo=self.screen_buffer_info() + 
self.move_cursor(x_offset=-sbinfo.CursorPosition.X,y_offset=-to_int(param,1)) + def rgb2bgr(self,c): + return((c&1)<<2)|(c&2)|((c&4)>>2) + def set_color(self,param): + cols=param.split(';') + sbinfo=CONSOLE_SCREEN_BUFFER_INFO() + windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(sbinfo)) + attr=sbinfo.Attributes + for c in cols: + if is_vista: + c=int(c) + else: + c=to_int(c,0) + if 29<c<38: + attr=(attr&0xfff0)|self.rgb2bgr(c-30) + elif 39<c<48: + attr=(attr&0xff0f)|(self.rgb2bgr(c-40)<<4) + elif c==0: + attr=self.orig_sbinfo.Attributes + elif c==1: + attr|=0x08 + elif c==4: + attr|=0x80 + elif c==7: + attr=(attr&0xff88)|((attr&0x70)>>4)|((attr&0x07)<<4) + windll.kernel32.SetConsoleTextAttribute(self.hconsole,attr) + def show_cursor(self,param): + csinfo.bVisible=1 + windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(csinfo)) + def hide_cursor(self,param): + csinfo.bVisible=0 + windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(csinfo)) + ansi_command_table={'A':move_up,'B':move_down,'C':move_right,'D':move_left,'E':next_line,'F':prev_line,'G':set_column,'H':set_cursor,'f':set_cursor,'J':clear_screen,'K':clear_line,'h':show_cursor,'l':hide_cursor,'m':set_color,'s':push_cursor,'u':pop_cursor,} + ansi_tokens=re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))') + def write(self,text): + try: + wlock.acquire() + for param,cmd,txt in self.ansi_tokens.findall(text): + if cmd: + cmd_func=self.ansi_command_table.get(cmd) + if cmd_func: + cmd_func(self,param) + else: + self.writeconsole(txt) + finally: + wlock.release() + def writeconsole(self,txt): + chars_written=c_int() + writeconsole=windll.kernel32.WriteConsoleA + if isinstance(txt,_type): + writeconsole=windll.kernel32.WriteConsoleW + TINY_STEP=3000 + for x in range(0,len(txt),TINY_STEP): + tiny=txt[x:x+TINY_STEP] + writeconsole(self.hconsole,tiny,len(tiny),byref(chars_written),None) + def flush(self): + pass + def isatty(self): + return True + sys.stderr=sys.stdout=AnsiTerm() + os.environ['TERM']='vt100' diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/ansiterm.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/ansiterm.pyc Binary files differnew file mode 100644 index 0000000..db6cdd8 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/ansiterm.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/__init__.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/__init__.py new file mode 100644 index 0000000..efeed79 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/__init__.py @@ -0,0 +1,4 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/__init__.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/__init__.pyc Binary files differnew file mode 100644 index 0000000..1882e61 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/__init__.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/autowaf.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/autowaf.py new file mode 100644 index 0000000..056cd51 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/autowaf.py @@ -0,0 +1,574 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import glob +import os +import subprocess +import sys +from waflib import Configure,Context,Logs,Node,Options,Task,Utils +from waflib.TaskGen import feature,before,after +global g_is_child +g_is_child=False +global g_step +g_step=0 +@feature('c','cxx') +@after('apply_incpaths') +def include_config_h(self): + self.env.append_value('INCPATHS',self.bld.bldnode.abspath()) +def set_options(opt,debug_by_default=False): + global g_step + if g_step>0: + return + dirs_options=opt.add_option_group('Installation directories','') + for k in('--prefix','--destdir'): + option=opt.parser.get_option(k) + if option: + opt.parser.remove_option(k) + dirs_options.add_option(option) + dirs_options.add_option('--bindir',type='string',help="Executable programs [Default: PREFIX/bin]") + dirs_options.add_option('--configdir',type='string',help="Configuration data [Default: PREFIX/etc]") + dirs_options.add_option('--datadir',type='string',help="Shared data [Default: PREFIX/share]") + dirs_options.add_option('--includedir',type='string',help="Header files [Default: PREFIX/include]") + dirs_options.add_option('--libdir',type='string',help="Libraries [Default: PREFIX/lib]") + dirs_options.add_option('--mandir',type='string',help="Manual pages [Default: DATADIR/man]") + dirs_options.add_option('--docdir',type='string',help="HTML documentation [Default: DATADIR/doc]") + if debug_by_default: + opt.add_option('--optimize',action='store_false',default=True,dest='debug',help="Build optimized binaries") + else: + opt.add_option('--debug',action='store_true',default=False,dest='debug',help="Build debuggable binaries") + opt.add_option('--pardebug',action='store_true',default=False,dest='pardebug',help="Build parallel-installable debuggable libraries with D suffix") + opt.add_option('--grind',action='store_true',default=False,dest='grind',help="Run tests in valgrind") + opt.add_option('--strict',action='store_true',default=False,dest='strict',help="Use strict compiler flags and show all warnings") + opt.add_option('--ultra-strict',action='store_true',default=False,dest='ultra_strict',help="Use even stricter compiler flags (likely to trigger many warnings in library headers)") + opt.add_option('--docs',action='store_true',default=False,dest='docs',help="Build documentation - requires doxygen") + opt.add_option('--lv2-user',action='store_true',default=False,dest='lv2_user',help="Install LV2 bundles to user location") + opt.add_option('--lv2-system',action='store_true',default=False,dest='lv2_system',help="Install LV2 bundles to system location") + dirs_options.add_option('--lv2dir',type='string',help="LV2 bundles [Default: LIBDIR/lv2]") + g_step=1 +def check_header(conf,lang,name,define='',mandatory=True): + includes='' + if sys.platform=="darwin": + includes='/opt/local/include' + if lang=='c': + check_func=conf.check_cc + elif lang=='cxx': + check_func=conf.check_cxx + else: + Logs.error("Unknown header language `%s'"%lang) + return + if define!='': + check_func(header_name=name,includes=includes,define_name=define,mandatory=mandatory) + else: + check_func(header_name=name,includes=includes,mandatory=mandatory) +def nameify(name): + return name.replace('/','_').replace('++','PP').replace('-','_').replace('.','_') +def define(conf,var_name,value): + conf.define(var_name,value) + conf.env[var_name]=value +def check_pkg(conf,name,**args): + if args['uselib_store'].lower()in conf.env['AUTOWAF_LOCAL_LIBS']: + return + class CheckType: + OPTIONAL=1 + 
MANDATORY=2 + var_name='CHECKED_'+nameify(args['uselib_store']) + check=not var_name in conf.env + mandatory=not'mandatory'in args or args['mandatory'] + if not check and'atleast_version'in args: + checked_version=conf.env['VERSION_'+name] + if checked_version and checked_version<args['atleast_version']: + check=True; + if not check and mandatory and conf.env[var_name]==CheckType.OPTIONAL: + check=True; + if check: + found=None + pkg_var_name='PKG_'+name.replace('-','_') + pkg_name=name + if conf.env.PARDEBUG: + args['mandatory']=False + found=conf.check_cfg(package=pkg_name+'D',args="--cflags --libs",**args) + if found: + pkg_name+='D' + if mandatory: + args['mandatory']=True + if not found: + found=conf.check_cfg(package=pkg_name,args="--cflags --libs",**args) + if found: + conf.env[pkg_var_name]=pkg_name + if'atleast_version'in args: + conf.env['VERSION_'+name]=args['atleast_version'] + if mandatory: + conf.env[var_name]=CheckType.MANDATORY + else: + conf.env[var_name]=CheckType.OPTIONAL +def normpath(path): + if sys.platform=='win32': + return os.path.normpath(path).replace('\\','/') + else: + return os.path.normpath(path) +def configure(conf): + global g_step + if g_step>1: + return + def append_cxx_flags(flags): + conf.env.append_value('CFLAGS',flags) + conf.env.append_value('CXXFLAGS',flags) + print('') + display_header('Global Configuration') + if Options.options.docs: + conf.load('doxygen') + conf.env['DOCS']=Options.options.docs + conf.env['DEBUG']=Options.options.debug or Options.options.pardebug + conf.env['PARDEBUG']=Options.options.pardebug + conf.env['PREFIX']=normpath(os.path.abspath(os.path.expanduser(conf.env['PREFIX']))) + def config_dir(var,opt,default): + if opt: + conf.env[var]=normpath(opt) + else: + conf.env[var]=normpath(default) + opts=Options.options + prefix=conf.env['PREFIX'] + config_dir('BINDIR',opts.bindir,os.path.join(prefix,'bin')) + config_dir('SYSCONFDIR',opts.configdir,os.path.join(prefix,'etc')) + config_dir('DATADIR',opts.datadir,os.path.join(prefix,'share')) + config_dir('INCLUDEDIR',opts.includedir,os.path.join(prefix,'include')) + config_dir('LIBDIR',opts.libdir,os.path.join(prefix,'lib')) + config_dir('MANDIR',opts.mandir,os.path.join(conf.env['DATADIR'],'man')) + config_dir('DOCDIR',opts.docdir,os.path.join(conf.env['DATADIR'],'doc')) + if Options.options.lv2dir: + conf.env['LV2DIR']=Options.options.lv2dir + elif Options.options.lv2_user: + if sys.platform=="darwin": + conf.env['LV2DIR']=os.path.join(os.getenv('HOME'),'Library/Audio/Plug-Ins/LV2') + elif sys.platform=="win32": + conf.env['LV2DIR']=os.path.join(os.getenv('APPDATA'),'LV2') + else: + conf.env['LV2DIR']=os.path.join(os.getenv('HOME'),'.lv2') + elif Options.options.lv2_system: + if sys.platform=="darwin": + conf.env['LV2DIR']='/Library/Audio/Plug-Ins/LV2' + elif sys.platform=="win32": + conf.env['LV2DIR']=os.path.join(os.getenv('COMMONPROGRAMFILES'),'LV2') + else: + conf.env['LV2DIR']=os.path.join(conf.env['LIBDIR'],'lv2') + else: + conf.env['LV2DIR']=os.path.join(conf.env['LIBDIR'],'lv2') + conf.env['LV2DIR']=normpath(conf.env['LV2DIR']) + if Options.options.docs: + doxygen=conf.find_program('doxygen') + if not doxygen: + conf.fatal("Doxygen is required to build with --docs") + dot=conf.find_program('dot') + if not dot: + conf.fatal("Graphviz (dot) is required to build with --docs") + if Options.options.debug: + if conf.env['MSVC_COMPILER']: + conf.env['CFLAGS']=['/Od','/Zi','/MTd'] + conf.env['CXXFLAGS']=['/Od','/Zi','/MTd'] + conf.env['LINKFLAGS']=['/DEBUG'] + else: + 
conf.env['CFLAGS']=['-O0','-g'] + conf.env['CXXFLAGS']=['-O0','-g'] + else: + if conf.env['MSVC_COMPILER']: + conf.env['CFLAGS']=['/MD'] + conf.env['CXXFLAGS']=['/MD'] + append_cxx_flags(['-DNDEBUG']) + if Options.options.ultra_strict: + Options.options.strict=True + conf.env.append_value('CFLAGS',['-Wredundant-decls','-Wstrict-prototypes','-Wmissing-prototypes','-Wcast-qual']) + conf.env.append_value('CXXFLAGS',['-Wcast-qual']) + if Options.options.strict: + conf.env.append_value('CFLAGS',['-pedantic','-Wshadow']) + conf.env.append_value('CXXFLAGS',['-ansi','-Wnon-virtual-dtor','-Woverloaded-virtual']) + append_cxx_flags(['-Wall','-Wcast-align','-Wextra','-Wmissing-declarations','-Wno-unused-parameter','-Wstrict-overflow','-Wundef','-Wwrite-strings','-fstrict-overflow']) + if not conf.check_cc(fragment=''' +#ifndef __clang__ +#error +#endif +int main() { return 0; }''',features='c',mandatory=False,execute=False,msg='Checking for clang'): + append_cxx_flags(['-Wlogical-op','-Wsuggest-attribute=noreturn','-Wunsafe-loop-optimizations']) + if not conf.env['MSVC_COMPILER']: + append_cxx_flags(['-fshow-column']) + conf.env.prepend_value('CFLAGS','-I'+os.path.abspath('.')) + conf.env.prepend_value('CXXFLAGS','-I'+os.path.abspath('.')) + display_msg(conf,"Install prefix",conf.env['PREFIX']) + display_msg(conf,"Debuggable build",str(conf.env['DEBUG'])) + display_msg(conf,"Build documentation",str(conf.env['DOCS'])) + print('') + g_step=2 +def set_c99_mode(conf): + if conf.env.MSVC_COMPILER: + conf.env.append_unique('CFLAGS',['-TP']) + else: + conf.env.append_unique('CFLAGS',['-std=c99']) +def set_local_lib(conf,name,has_objects): + var_name='HAVE_'+nameify(name.upper()) + define(conf,var_name,1) + if has_objects: + if type(conf.env['AUTOWAF_LOCAL_LIBS'])!=dict: + conf.env['AUTOWAF_LOCAL_LIBS']={} + conf.env['AUTOWAF_LOCAL_LIBS'][name.lower()]=True + else: + if type(conf.env['AUTOWAF_LOCAL_HEADERS'])!=dict: + conf.env['AUTOWAF_LOCAL_HEADERS']={} + conf.env['AUTOWAF_LOCAL_HEADERS'][name.lower()]=True +def append_property(obj,key,val): + if hasattr(obj,key): + setattr(obj,key,getattr(obj,key)+val) + else: + setattr(obj,key,val) +def use_lib(bld,obj,libs): + abssrcdir=os.path.abspath('.') + libs_list=libs.split() + for l in libs_list: + in_headers=l.lower()in bld.env['AUTOWAF_LOCAL_HEADERS'] + in_libs=l.lower()in bld.env['AUTOWAF_LOCAL_LIBS'] + if in_libs: + append_property(obj,'use',' lib%s '%l.lower()) + append_property(obj,'framework',bld.env['FRAMEWORK_'+l]) + if in_headers or in_libs: + inc_flag='-iquote '+os.path.join(abssrcdir,l.lower()) + for f in['CFLAGS','CXXFLAGS']: + if not inc_flag in bld.env[f]: + bld.env.prepend_value(f,inc_flag) + else: + append_property(obj,'uselib',' '+l) +@feature('c','cxx') +@before('apply_link') +def version_lib(self): + if sys.platform=='win32': + self.vnum=None + if self.env['PARDEBUG']: + applicable=['cshlib','cxxshlib','cstlib','cxxstlib'] + if[x for x in applicable if x in self.features]: + self.target=self.target+'D' +def set_lib_env(conf,name,version): + 'Set up environment for local library as if found via pkg-config.' 
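The autowaf helpers above (set_options, configure, set_c99_mode, check_pkg) are meant to be driven from a project-level wscript rather than called directly. A minimal sketch of such a wscript, assuming a hypothetical C project named 'example' with an LV2 pkg-config dependency; none of these names or paths come from this commit:

    # wscript -- sketch only, not part of this commit
    from waflib.extras import autowaf

    APPNAME = 'example'     # hypothetical project name
    VERSION = '0.1.0'

    top = '.'
    out = 'build'

    def options(opt):
        opt.load('compiler_c')
        autowaf.set_options(opt)

    def configure(conf):
        conf.load('compiler_c')
        autowaf.configure(conf)
        autowaf.set_c99_mode(conf)
        # Look up lv2 via pkg-config; flags are stored under the LV2 uselib name.
        autowaf.check_pkg(conf, 'lv2', uselib_store='LV2',
                          atleast_version='1.0.0', mandatory=True)

    def build(bld):
        bld(features     = 'c cshlib',
            source       = 'example.c',
            target       = 'example',
            install_path = '${LV2DIR}/example.lv2',
            uselib       = 'LV2')

check_pkg() feeds conf.check_cfg, so the resulting include and library flags are picked up by the task generator through uselib='LV2'.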
+ NAME=name.upper() + major_ver=version.split('.')[0] + pkg_var_name='PKG_'+name.replace('-','_')+'_'+major_ver + lib_name='%s-%s'%(name,major_ver) + if conf.env.PARDEBUG: + lib_name+='D' + conf.env[pkg_var_name]=lib_name + conf.env['INCLUDES_'+NAME]=['${INCLUDEDIR}/%s-%s'%(name,major_ver)] + conf.env['LIBPATH_'+NAME]=[conf.env.LIBDIR] + conf.env['LIB_'+NAME]=[lib_name] +def display_header(title): + Logs.pprint('BOLD',title) +def display_msg(conf,msg,status=None,color=None): + color='CYAN' + if type(status)==bool and status or status=="True": + color='GREEN' + elif type(status)==bool and not status or status=="False": + color='YELLOW' + Logs.pprint('BOLD'," *",sep='') + Logs.pprint('NORMAL',"%s"%msg.ljust(conf.line_just-3),sep='') + Logs.pprint('BOLD',":",sep='') + Logs.pprint(color,status) +def link_flags(env,lib): + return' '.join(map(lambda x:env['LIB_ST']%x,env['LIB_'+lib])) +def compile_flags(env,lib): + return' '.join(map(lambda x:env['CPPPATH_ST']%x,env['INCLUDES_'+lib])) +def set_recursive(): + global g_is_child + g_is_child=True +def is_child(): + global g_is_child + return g_is_child +def build_pc(bld,name,version,version_suffix,libs,subst_dict={}): + '''Build a pkg-config file for a library. + name -- uppercase variable name (e.g. 'SOMENAME') + version -- version string (e.g. '1.2.3') + version_suffix -- name version suffix (e.g. '2') + libs -- string/list of dependencies (e.g. 'LIBFOO GLIB') + ''' + pkg_prefix=bld.env['PREFIX'] + if pkg_prefix[-1]=='/': + pkg_prefix=pkg_prefix[:-1] + target=name.lower() + if version_suffix!='': + target+='-'+version_suffix + if bld.env['PARDEBUG']: + target+='D' + target+='.pc' + libdir=bld.env['LIBDIR'] + if libdir.startswith(pkg_prefix): + libdir=libdir.replace(pkg_prefix,'${exec_prefix}') + includedir=bld.env['INCLUDEDIR'] + if includedir.startswith(pkg_prefix): + includedir=includedir.replace(pkg_prefix,'${prefix}') + obj=bld(features='subst',source='%s.pc.in'%name.lower(),target=target,install_path=os.path.join(bld.env['LIBDIR'],'pkgconfig'),exec_prefix='${prefix}',PREFIX=pkg_prefix,EXEC_PREFIX='${prefix}',LIBDIR=libdir,INCLUDEDIR=includedir) + if type(libs)!=list: + libs=libs.split() + subst_dict[name+'_VERSION']=version + subst_dict[name+'_MAJOR_VERSION']=version[0:version.find('.')] + for i in libs: + subst_dict[i+'_LIBS']=link_flags(bld.env,i) + lib_cflags=compile_flags(bld.env,i) + if lib_cflags=='': + lib_cflags=' ' + subst_dict[i+'_CFLAGS']=lib_cflags + obj.__dict__.update(subst_dict) +def build_dir(name,subdir): + if is_child(): + return os.path.join('build',name,subdir) + else: + return os.path.join('build',subdir) +def make_simple_dox(name): + name=name.lower() + NAME=name.upper() + try: + top=os.getcwd() + os.chdir(build_dir(name,'doc/html')) + page='group__%s.html'%name + if not os.path.exists(page): + return + for i in[['%s_API '%NAME,''],['%s_DEPRECATED '%NAME,''],['group__%s.html'%name,''],[' ',''],['<script.*><\/script>',''],['<hr\/><a name="details" id="details"><\/a><h2>.*<\/h2>',''],['<link href=\"tabs.css\" rel=\"stylesheet\" type=\"text\/css\"\/>',''],['<img class=\"footer\" src=\"doxygen.png\" alt=\"doxygen\"\/>','Doxygen']]: + os.system("sed -i 's/%s/%s/g' %s"%(i[0],i[1],page)) + os.rename('group__%s.html'%name,'index.html') + for i in(glob.glob('*.png')+glob.glob('*.html')+glob.glob('*.js')+glob.glob('*.css')): + if i!='index.html'and i!='style.css': + os.remove(i) + os.chdir(top) + os.chdir(build_dir(name,'doc/man/man3')) + for i in glob.glob('*.3'): + os.system("sed -i 's/%s_API //' %s"%(NAME,i)) + for i in 
glob.glob('_*'): + os.remove(i) + os.chdir(top) + except Exception ,e: + Logs.error("Failed to fix up %s documentation: %s"%(name,e)) +def build_dox(bld,name,version,srcdir,blddir,outdir='',versioned=True): + if not bld.env['DOCS']: + return + if is_child(): + src_dir=os.path.join(srcdir,name.lower()) + doc_dir=os.path.join(blddir,name.lower(),'doc') + else: + src_dir=srcdir + doc_dir=os.path.join(blddir,'doc') + subst_tg=bld(features='subst',source='doc/reference.doxygen.in',target='doc/reference.doxygen',install_path='',name='doxyfile') + subst_dict={name+'_VERSION':version,name+'_SRCDIR':os.path.abspath(src_dir),name+'_DOC_DIR':os.path.abspath(doc_dir)} + subst_tg.__dict__.update(subst_dict) + subst_tg.post() + docs=bld(features='doxygen',doxyfile='doc/reference.doxygen') + docs.post() + outname=name.lower() + if versioned: + outname+='-%d'%int(version[0:version.find('.')]) + bld.install_files(os.path.join('${DOCDIR}',outname,outdir,'html'),bld.path.get_bld().ant_glob('doc/html/*')) + for i in range(1,8): + bld.install_files('${MANDIR}/man%d'%i,bld.path.get_bld().ant_glob('doc/man/man%d/*'%i,excl='**/_*')) +def build_version_files(header_path,source_path,domain,major,minor,micro): + header_path=os.path.abspath(header_path) + source_path=os.path.abspath(source_path) + text="int "+domain+"_major_version = "+str(major)+";\n" + text+="int "+domain+"_minor_version = "+str(minor)+";\n" + text+="int "+domain+"_micro_version = "+str(micro)+";\n" + try: + o=open(source_path,'w') + o.write(text) + o.close() + except IOError: + Logs.error('Failed to open %s for writing\n'%source_path) + sys.exit(-1) + text="#ifndef __"+domain+"_version_h__\n" + text+="#define __"+domain+"_version_h__\n" + text+="extern const char* "+domain+"_revision;\n" + text+="extern int "+domain+"_major_version;\n" + text+="extern int "+domain+"_minor_version;\n" + text+="extern int "+domain+"_micro_version;\n" + text+="#endif /* __"+domain+"_version_h__ */\n" + try: + o=open(header_path,'w') + o.write(text) + o.close() + except IOError: + Logs.warn('Failed to open %s for writing\n'%header_path) + sys.exit(-1) + return None +def build_i18n_pot(bld,srcdir,dir,name,sources,copyright_holder=None): + Logs.info('Generating pot file from %s'%name) + pot_file='%s.pot'%name + cmd=['xgettext','--keyword=_','--keyword=N_','--keyword=S_','--from-code=UTF-8','-o',pot_file] + if copyright_holder: + cmd+=['--copyright-holder="%s"'%copyright_holder] + cmd+=sources + Logs.info('Updating '+pot_file) + subprocess.call(cmd,cwd=os.path.join(srcdir,dir)) +def build_i18n_po(bld,srcdir,dir,name,sources,copyright_holder=None): + pwd=os.getcwd() + os.chdir(os.path.join(srcdir,dir)) + pot_file='%s.pot'%name + po_files=glob.glob('po/*.po') + for po_file in po_files: + cmd=['msgmerge','--update',po_file,pot_file] + Logs.info('Updating '+po_file) + subprocess.call(cmd) + os.chdir(pwd) +def build_i18n_mo(bld,srcdir,dir,name,sources,copyright_holder=None): + pwd=os.getcwd() + os.chdir(os.path.join(srcdir,dir)) + pot_file='%s.pot'%name + po_files=glob.glob('po/*.po') + for po_file in po_files: + mo_file=po_file.replace('.po','.mo') + cmd=['msgfmt','-c','-f','-o',mo_file,po_file] + Logs.info('Generating '+po_file) + subprocess.call(cmd) + os.chdir(pwd) +def build_i18n(bld,srcdir,dir,name,sources,copyright_holder=None): + build_i18n_pot(bld,srcdir,dir,name,sources,copyright_holder) + build_i18n_po(bld,srcdir,dir,name,sources,copyright_holder) + build_i18n_mo(bld,srcdir,dir,name,sources,copyright_holder) +def cd_to_build_dir(ctx,appname): + 
orig_dir=os.path.abspath(os.curdir) + top_level=(len(ctx.stack_path)>1) + if top_level: + os.chdir(os.path.join('build',appname)) + else: + os.chdir('build') + Logs.pprint('GREEN',"Waf: Entering directory `%s'"%os.path.abspath(os.getcwd())) +def cd_to_orig_dir(ctx,child): + if child: + os.chdir(os.path.join('..','..')) + else: + os.chdir('..') +def pre_test(ctx,appname,dirs=['src']): + diropts='' + for i in dirs: + diropts+=' -d '+i + cd_to_build_dir(ctx,appname) + clear_log=open('lcov-clear.log','w') + try: + try: + subprocess.call(('lcov %s -z'%diropts).split(),stdout=clear_log,stderr=clear_log) + except: + Logs.warn('Failed to run lcov, no coverage report will be generated') + finally: + clear_log.close() +def post_test(ctx,appname,dirs=['src'],remove=['*boost*','c++*']): + diropts='' + for i in dirs: + diropts+=' -d '+i + coverage_log=open('lcov-coverage.log','w') + coverage_lcov=open('coverage.lcov','w') + coverage_stripped_lcov=open('coverage-stripped.lcov','w') + try: + try: + base='.' + if g_is_child: + base='..' + subprocess.call(('lcov -c %s -b %s'%(diropts,base)).split(),stdout=coverage_lcov,stderr=coverage_log) + subprocess.call(['lcov','--remove','coverage.lcov']+remove,stdout=coverage_stripped_lcov,stderr=coverage_log) + if not os.path.isdir('coverage'): + os.makedirs('coverage') + subprocess.call('genhtml -o coverage coverage-stripped.lcov'.split(),stdout=coverage_log,stderr=coverage_log) + except: + Logs.warn('Failed to run lcov, no coverage report will be generated') + finally: + coverage_stripped_lcov.close() + coverage_lcov.close() + coverage_log.close() + print('') + Logs.pprint('GREEN',"Waf: Leaving directory `%s'"%os.path.abspath(os.getcwd())) + top_level=(len(ctx.stack_path)>1) + if top_level: + cd_to_orig_dir(ctx,top_level) + print('') + Logs.pprint('BOLD','Coverage:',sep='') + print('<file://%s>\n\n'%os.path.abspath('coverage/index.html')) +def run_test(ctx,appname,test,desired_status=0,dirs=['src'],name='',header=False): + s=test + if type(test)==type([]): + s=' '.join(i) + if header: + Logs.pprint('BOLD','** Test',sep='') + Logs.pprint('NORMAL','%s'%s) + cmd=test + if Options.options.grind: + cmd='valgrind '+test + if subprocess.call(cmd,shell=True)==desired_status: + Logs.pprint('GREEN','** Pass %s'%name) + return True + else: + Logs.pprint('RED','** FAIL %s'%name) + return False +def run_tests(ctx,appname,tests,desired_status=0,dirs=['src'],name='*',headers=False): + failures=0 + diropts='' + for i in dirs: + diropts+=' -d '+i + for i in tests: + if not run_test(ctx,appname,i,desired_status,dirs,i,headers): + failures+=1 + print('') + if failures==0: + Logs.pprint('GREEN','** Pass: All %s.%s tests passed'%(appname,name)) + else: + Logs.pprint('RED','** FAIL: %d %s.%s tests failed'%(failures,appname,name)) +def run_ldconfig(ctx): + if(ctx.cmd=='install'and not ctx.env['RAN_LDCONFIG']and ctx.env['LIBDIR']and not'DESTDIR'in os.environ and not Options.options.destdir): + try: + Logs.info("Waf: Running `/sbin/ldconfig %s'"%ctx.env['LIBDIR']) + subprocess.call(['/sbin/ldconfig',ctx.env['LIBDIR']]) + ctx.env['RAN_LDCONFIG']=True + except: + pass +def write_news(name,in_files,out_file,top_entries=None,extra_entries=None): + import rdflib + import textwrap + from time import strftime,strptime + doap=rdflib.Namespace('http://usefulinc.com/ns/doap#') + dcs=rdflib.Namespace('http://ontologi.es/doap-changeset#') + rdfs=rdflib.Namespace('http://www.w3.org/2000/01/rdf-schema#') + foaf=rdflib.Namespace('http://xmlns.com/foaf/0.1/') + 
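The pre_test/run_tests/post_test helpers above are normally wired to a custom command in the same wscript. A sketch, assuming a hypothetical test binary built elsewhere in the project:

    from waflib.extras import autowaf

    APPNAME = 'example'   # assumed to match the name used at configure time

    def test(ctx):
        autowaf.pre_test(ctx, APPNAME)
        # Paths are relative to the build directory; pre_test() chdirs there.
        autowaf.run_tests(ctx, APPNAME, ['./test_example'], dirs=['src'])
        autowaf.post_test(ctx, APPNAME)

With --grind the tests run under valgrind, and post_test() collects lcov coverage if lcov is installed.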
rdf=rdflib.Namespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#') + m=rdflib.ConjunctiveGraph() + try: + for i in in_files: + m.parse(i,format='n3') + except: + Logs.warn('Error parsing data, unable to generate NEWS') + return + proj=m.value(None,rdf.type,doap.Project) + for f in m.triples([proj,rdfs.seeAlso,None]): + if f[2].endswith('.ttl'): + m.parse(f[2],format='n3') + entries={} + for r in m.triples([proj,doap.release,None]): + release=r[2] + revision=m.value(release,doap.revision,None) + date=m.value(release,doap.created,None) + blamee=m.value(release,dcs.blame,None) + changeset=m.value(release,dcs.changeset,None) + dist=m.value(release,doap['file-release'],None) + if revision and date and blamee and changeset: + entry='%s (%s) stable;\n'%(name,revision) + for i in m.triples([changeset,dcs.item,None]): + item=textwrap.wrap(m.value(i[2],rdfs.label,None),width=79) + entry+='\n * '+'\n '.join(item) + if dist and top_entries is not None: + if not str(dist)in top_entries: + top_entries[str(dist)]=[] + top_entries[str(dist)]+=['%s: %s'%(name,'\n '.join(item))] + if extra_entries: + for i in extra_entries[str(dist)]: + entry+='\n * '+i + entry+='\n\n --' + blamee_name=m.value(blamee,foaf.name,None) + blamee_mbox=m.value(blamee,foaf.mbox,None) + if blamee_name and blamee_mbox: + entry+=' %s <%s>'%(blamee_name,blamee_mbox.replace('mailto:','')) + entry+=' %s\n\n'%(strftime('%a, %d %b %Y %H:%M:%S +0000',strptime(date,'%Y-%m-%d'))) + entries[(date,revision)]=entry + else: + Logs.warn('Ignored incomplete %s release description'%name) + if len(entries)>0: + news=open(out_file,'w') + for e in sorted(entries.keys(),reverse=True): + news.write(entries[e]) + news.close() diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/autowaf.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/autowaf.pyc Binary files differnew file mode 100644 index 0000000..a5b77d5 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/autowaf.pyc diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/doxygen.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/doxygen.py new file mode 100644 index 0000000..7b7846e --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/doxygen.py @@ -0,0 +1,148 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from fnmatch import fnmatchcase +import os,os.path,re,stat +from waflib import Task,Utils,Node,Logs +from waflib.TaskGen import feature +DOXY_STR='${DOXYGEN} - ' +DOXY_FMTS='html latex man rft xml'.split() +DOXY_FILE_PATTERNS='*.'+' *.'.join(''' +c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx hpp h++ idl odl cs php php3 +inc m mm py f90c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx +'''.split()) +re_rl=re.compile('\\\\\r*\n',re.MULTILINE) +re_nl=re.compile('\r*\n',re.M) +def parse_doxy(txt): + tbl={} + txt=re_rl.sub('',txt) + lines=re_nl.split(txt) + for x in lines: + x=x.strip() + if not x or x.startswith('#')or x.find('=')<0: + continue + if x.find('+=')>=0: + tmp=x.split('+=') + key=tmp[0].strip() + if key in tbl: + tbl[key]+=' '+'+='.join(tmp[1:]).strip() + else: + tbl[key]='+='.join(tmp[1:]).strip() + else: + tmp=x.split('=') + tbl[tmp[0].strip()]='='.join(tmp[1:]).strip() + return tbl +class doxygen(Task.Task): + vars=['DOXYGEN','DOXYFLAGS'] + color='BLUE' + def runnable_status(self): + ''' + self.pars are populated in runnable_status - because this function is being + run *before* both self.pars "consumers" - scan() and run() + + set output_dir (node) for the output + ''' + for x in self.run_after: + if not x.hasrun: + return Task.ASK_LATER + if not getattr(self,'pars',None): + txt=self.inputs[0].read() + self.pars=parse_doxy(txt) + if not self.pars.get('OUTPUT_DIRECTORY'): + self.pars['OUTPUT_DIRECTORY']=self.inputs[0].parent.get_bld().abspath() + if getattr(self.generator,'pars',None): + for k,v in self.generator.pars.iteritems(): + self.pars[k]=v + self.doxy_inputs=getattr(self,'doxy_inputs',[]) + if not self.pars.get('INPUT'): + self.doxy_inputs.append(self.inputs[0].parent) + else: + for i in self.pars.get('INPUT').split(): + if os.path.isabs(i): + node=self.generator.bld.root.find_node(i) + else: + node=self.generator.path.find_node(i) + if not node: + self.generator.bld.fatal('Could not find the doxygen input %r'%i) + self.doxy_inputs.append(node) + if not getattr(self,'output_dir',None): + bld=self.generator.bld + self.output_dir=bld.root.find_dir(self.pars['OUTPUT_DIRECTORY']) + if not self.output_dir: + self.output_dir=bld.path.find_or_declare(self.pars['OUTPUT_DIRECTORY']) + self.signature() + return Task.Task.runnable_status(self) + def scan(self): + if self.pars.get('RECURSIVE')=='YES': + Logs.warn("Doxygen RECURSIVE dependencies are not supported") + exclude_patterns=self.pars.get('EXCLUDE_PATTERNS','').split() + file_patterns=self.pars.get('FILE_PATTERNS','').split() + if not file_patterns: + file_patterns=DOXY_FILE_PATTERNS + nodes=[] + names=[] + for node in self.doxy_inputs: + if os.path.isdir(node.abspath()): + for m in node.ant_glob(file_patterns): + nodes.append(m) + else: + nodes.append(node) + return(nodes,names) + def run(self): + dct=self.pars.copy() + dct['INPUT']=' '.join([x.abspath()for x in self.doxy_inputs]) + code='\n'.join(['%s = %s'%(x,dct[x])for x in self.pars]) + code=code + cmd=Utils.subst_vars(DOXY_STR,self.env) + env=self.env.env or None + proc=Utils.subprocess.Popen(cmd,shell=True,stdin=Utils.subprocess.PIPE,env=env,cwd=self.generator.bld.path.get_bld().abspath()) + proc.communicate(code) + return proc.returncode + def post_run(self): + nodes=self.output_dir.ant_glob('**/*',quiet=True) + for x in nodes: + x.sig=Utils.h_file(x.abspath()) + self.outputs+=nodes + return Task.Task.post_run(self) +class tar(Task.Task): + run_str='${TAR} ${TAROPTS} ${TGT} ${SRC}' 
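This tool is used through the 'doxygen' feature (handled by process_doxy further down), after conf.find_program has located doxygen and tar. A wscript fragment as a sketch; the Doxyfile path and archive name are assumptions, not files from this commit:

    def build(bld):
        if bld.env.DOCS:                           # set by autowaf.configure() when --docs is given
            bld(features='doxygen',
                doxyfile='doc/reference.doxygen',  # assumed Doxyfile location
                doxy_tar='reference-doc.tar.gz')   # optional: also pack the generated output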
+ color='RED' + after=['doxygen'] + def runnable_status(self): + for x in getattr(self,'input_tasks',[]): + if not x.hasrun: + return Task.ASK_LATER + if not getattr(self,'tar_done_adding',None): + self.tar_done_adding=True + for x in getattr(self,'input_tasks',[]): + self.set_inputs(x.outputs) + if not self.inputs: + return Task.SKIP_ME + return Task.Task.runnable_status(self) + def __str__(self): + tgt_str=' '.join([a.nice_path(self.env)for a in self.outputs]) + return'%s: %s\n'%(self.__class__.__name__,tgt_str) +@feature('doxygen') +def process_doxy(self): + if not getattr(self,'doxyfile',None): + self.generator.bld.fatal('no doxyfile??') + node=self.doxyfile + if not isinstance(node,Node.Node): + node=self.path.find_resource(node) + if not node: + raise ValueError('doxygen file not found') + dsk=self.create_task('doxygen',node) + if getattr(self,'doxy_tar',None): + tsk=self.create_task('tar') + tsk.input_tasks=[dsk] + tsk.set_outputs(self.path.find_or_declare(self.doxy_tar)) + if self.doxy_tar.endswith('bz2'): + tsk.env['TAROPTS']=['cjf'] + elif self.doxy_tar.endswith('gz'): + tsk.env['TAROPTS']=['czf'] + else: + tsk.env['TAROPTS']=['cf'] +def configure(conf): + conf.find_program('doxygen',var='DOXYGEN') + conf.find_program('tar',var='TAR') diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/swig.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/swig.py new file mode 100644 index 0000000..4b6f50b --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/swig.py @@ -0,0 +1,120 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import re +from waflib import Task,Utils,Logs +from waflib.TaskGen import extension,feature,after_method +from waflib.Configure import conf +from waflib.Tools import c_preproc +SWIG_EXTS=['.swig','.i'] +re_module=re.compile('%module(?:\s*\(.*\))?\s+(.+)',re.M) +re_1=re.compile(r'^%module.*?\s+([\w]+)\s*?$',re.M) +re_2=re.compile('%include "(.*)"',re.M) +re_3=re.compile('#include "(.*)"',re.M) +class swig(Task.Task): + color='BLUE' + run_str='${SWIG} ${SWIGFLAGS} ${SWIGPATH_ST:INCPATHS} ${SWIGDEF_ST:DEFINES} ${SRC}' + ext_out=['.h'] + vars=['SWIG_VERSION','SWIGDEPS'] + def runnable_status(self): + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + if not getattr(self,'init_outputs',None): + self.init_outputs=True + if not getattr(self,'module',None): + txt=self.inputs[0].read() + m=re_module.search(txt) + if not m: + raise ValueError("could not find the swig module name") + self.module=m.group(1) + swig_c(self) + for x in self.env['SWIGFLAGS']: + x=x[1:] + try: + fun=swig_langs[x] + except KeyError: + pass + else: + fun(self) + return super(swig,self).runnable_status() + def scan(self): + env=self.env + lst_src=[] + seen=[] + to_see=[self.inputs[0]] + while to_see: + node=to_see.pop(0) + if node in seen: + continue + seen.append(node) + lst_src.append(node) + code=node.read() + code=c_preproc.re_nl.sub('',code) + code=c_preproc.re_cpp.sub(c_preproc.repl,code) + names=re_2.findall(code)+re_3.findall(code) + for n in names: + for d in self.generator.includes_nodes+[node.parent]: + u=d.find_resource(n) + if u: + to_see.append(u) + break + else: + Logs.warn('could not find %r'%n) + return(lst_src,[]) +swig_langs={} +def swigf(fun): + swig_langs[fun.__name__.replace('swig_','')]=fun +swig.swigf=swigf +def swig_c(self): + ext='.swigwrap_%d.c'%self.generator.idx + flags=self.env['SWIGFLAGS'] + 
if'-c++'in flags: + ext+='xx' + out_node=self.inputs[0].parent.find_or_declare(self.module+ext) + if'-c++'in flags: + c_tsk=self.generator.cxx_hook(out_node) + else: + c_tsk=self.generator.c_hook(out_node) + c_tsk.set_run_after(self) + ge=self.generator.bld.producer + ge.outstanding.insert(0,c_tsk) + ge.total+=1 + try: + ltask=self.generator.link_task + except AttributeError: + pass + else: + ltask.set_run_after(c_tsk) + ltask.inputs.append(c_tsk.outputs[0]) + self.outputs.append(out_node) + if not'-o'in self.env['SWIGFLAGS']: + self.env.append_value('SWIGFLAGS',['-o',self.outputs[0].abspath()]) +@swigf +def swig_python(tsk): + tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module+'.py')) +@swigf +def swig_ocaml(tsk): + tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module+'.ml')) + tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module+'.mli')) +@extension(*SWIG_EXTS) +def i_file(self,node): + tsk=self.create_task('swig') + tsk.set_inputs(node) + tsk.module=getattr(self,'swig_module',None) + flags=self.to_list(getattr(self,'swig_flags',[])) + tsk.env.append_value('SWIGFLAGS',flags) +@conf +def check_swig_version(self): + reg_swig=re.compile(r'SWIG Version\s(.*)',re.M) + swig_out=self.cmd_and_log('%s -version'%self.env['SWIG']) + swigver=tuple([int(s)for s in reg_swig.findall(swig_out)[0].split('.')]) + self.env['SWIG_VERSION']=swigver + msg='Checking for swig version' + self.msg(msg,'.'.join(map(str,swigver))) + return swigver +def configure(conf): + swig=conf.find_program('swig',var='SWIG') + conf.env.SWIGPATH_ST='-I%s' + conf.env.SWIGDEF_ST='-D%s' diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/fixpy2.py b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/fixpy2.py new file mode 100644 index 0000000..98f7036 --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/fixpy2.py @@ -0,0 +1,53 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os +all_modifs={} +def fixdir(dir): + global all_modifs + for k in all_modifs: + for v in all_modifs[k]: + modif(os.path.join(dir,'waflib'),k,v) +def modif(dir,name,fun): + if name=='*': + lst=[] + for y in'. 
Tools extras'.split(): + for x in os.listdir(os.path.join(dir,y)): + if x.endswith('.py'): + lst.append(y+os.sep+x) + for x in lst: + modif(dir,x,fun) + return + filename=os.path.join(dir,name) + f=open(filename,'r') + try: + txt=f.read() + finally: + f.close() + txt=fun(txt) + f=open(filename,'w') + try: + f.write(txt) + finally: + f.close() +def subst(*k): + def do_subst(fun): + global all_modifs + for x in k: + try: + all_modifs[x].append(fun) + except KeyError: + all_modifs[x]=[fun] + return fun + return do_subst +@subst('*') +def r1(code): + code=code.replace(',e:',',e:') + code=code.replace("",'') + code=code.replace('','') + return code +@subst('Runner.py') +def r4(code): + code=code.replace('next(self.biter)','self.biter.next()') + return code diff --git a/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/fixpy2.pyc b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/fixpy2.pyc Binary files differnew file mode 100644 index 0000000..cb2a53c --- /dev/null +++ b/.waf-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/fixpy2.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Build.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Build.py new file mode 100644 index 0000000..bfa4f61 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Build.py @@ -0,0 +1,758 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys,errno,re,shutil +try: + import cPickle +except ImportError: + import pickle as cPickle +from waflib import Runner,TaskGen,Utils,ConfigSet,Task,Logs,Options,Context,Errors +import waflib.Node +CACHE_DIR='c4che' +CACHE_SUFFIX='_cache.py' +INSTALL=1337 +UNINSTALL=-1337 +SAVED_ATTRS='root node_deps raw_deps task_sigs'.split() +CFG_FILES='cfg_files' +POST_AT_ONCE=0 +POST_LAZY=1 +POST_BOTH=2 +class BuildContext(Context.Context): + '''executes the build''' + cmd='build' + variant='' + def __init__(self,**kw): + super(BuildContext,self).__init__(**kw) + self.is_install=0 + self.top_dir=kw.get('top_dir',Context.top_dir) + self.run_dir=kw.get('run_dir',Context.run_dir) + self.post_mode=POST_AT_ONCE + self.out_dir=kw.get('out_dir',Context.out_dir) + self.cache_dir=kw.get('cache_dir',None) + if not self.cache_dir: + self.cache_dir=self.out_dir+os.sep+CACHE_DIR + self.all_envs={} + self.task_sigs={} + self.node_deps={} + self.raw_deps={} + self.cache_dir_contents={} + self.task_gen_cache_names={} + self.launch_dir=Context.launch_dir + self.jobs=Options.options.jobs + self.targets=Options.options.targets + self.keep=Options.options.keep + self.cache_global=Options.cache_global + self.nocache=Options.options.nocache + self.progress_bar=Options.options.progress_bar + self.deps_man=Utils.defaultdict(list) + self.current_group=0 + self.groups=[] + self.group_names={} + def get_variant_dir(self): + if not self.variant: + return self.out_dir + return os.path.join(self.out_dir,self.variant) + variant_dir=property(get_variant_dir,None) + def __call__(self,*k,**kw): + kw['bld']=self + ret=TaskGen.task_gen(*k,**kw) + self.task_gen_cache_names={} + self.add_to_group(ret,group=kw.get('group',None)) + return ret + def rule(self,*k,**kw): + def f(rule): + ret=self(*k,**kw) + ret.rule=rule + return ret + return f + def __copy__(self): + raise Errors.WafError('build contexts are not supposed to be copied') + def install_files(self,*k,**kw): + pass + def install_as(self,*k,**kw): + pass + def symlink_as(self,*k,**kw): + pass + def load_envs(self): + 
node=self.root.find_node(self.cache_dir) + if not node: + raise Errors.WafError('The project was not configured: run "waf configure" first!') + lst=node.ant_glob('**/*%s'%CACHE_SUFFIX,quiet=True) + if not lst: + raise Errors.WafError('The cache directory is empty: reconfigure the project') + for x in lst: + name=x.path_from(node).replace(CACHE_SUFFIX,'').replace('\\','/') + env=ConfigSet.ConfigSet(x.abspath()) + self.all_envs[name]=env + for f in env[CFG_FILES]: + newnode=self.root.find_resource(f) + try: + h=Utils.h_file(newnode.abspath()) + except(IOError,AttributeError): + Logs.error('cannot find %r'%f) + h=Utils.SIG_NIL + newnode.sig=h + def init_dirs(self): + if not(os.path.isabs(self.top_dir)and os.path.isabs(self.out_dir)): + raise Errors.WafError('The project was not configured: run "waf configure" first!') + self.path=self.srcnode=self.root.find_dir(self.top_dir) + self.bldnode=self.root.make_node(self.variant_dir) + self.bldnode.mkdir() + def execute(self): + self.restore() + if not self.all_envs: + self.load_envs() + self.execute_build() + def execute_build(self): + Logs.info("Waf: Entering directory `%s'"%self.variant_dir) + self.recurse([self.run_dir]) + self.pre_build() + self.timer=Utils.Timer() + if self.progress_bar: + sys.stderr.write(Logs.colors.cursor_off) + try: + self.compile() + finally: + if self.progress_bar==1: + c=len(self.returned_tasks)or 1 + self.to_log(self.progress_line(c,c,Logs.colors.BLUE,Logs.colors.NORMAL)) + print('') + sys.stdout.flush() + sys.stderr.write(Logs.colors.cursor_on) + Logs.info("Waf: Leaving directory `%s'"%self.variant_dir) + self.post_build() + def restore(self): + try: + env=ConfigSet.ConfigSet(os.path.join(self.cache_dir,'build.config.py')) + except(IOError,OSError): + pass + else: + if env['version']<Context.HEXVERSION: + raise Errors.WafError('Version mismatch! 
reconfigure the project') + for t in env['tools']: + self.setup(**t) + dbfn=os.path.join(self.variant_dir,Context.DBFILE) + try: + data=Utils.readf(dbfn,'rb') + except(IOError,EOFError): + Logs.debug('build: Could not load the build cache %s (missing)'%dbfn) + else: + try: + waflib.Node.pickle_lock.acquire() + waflib.Node.Nod3=self.node_class + try: + data=cPickle.loads(data) + except Exception as e: + Logs.debug('build: Could not pickle the build cache %s: %r'%(dbfn,e)) + else: + for x in SAVED_ATTRS: + setattr(self,x,data[x]) + finally: + waflib.Node.pickle_lock.release() + self.init_dirs() + def store(self): + data={} + for x in SAVED_ATTRS: + data[x]=getattr(self,x) + db=os.path.join(self.variant_dir,Context.DBFILE) + try: + waflib.Node.pickle_lock.acquire() + waflib.Node.Nod3=self.node_class + x=cPickle.dumps(data,-1) + finally: + waflib.Node.pickle_lock.release() + Utils.writef(db+'.tmp',x,m='wb') + try: + st=os.stat(db) + os.remove(db) + if not Utils.is_win32: + os.chown(db+'.tmp',st.st_uid,st.st_gid) + except(AttributeError,OSError): + pass + os.rename(db+'.tmp',db) + def compile(self): + Logs.debug('build: compile()') + self.producer=Runner.Parallel(self,self.jobs) + self.producer.biter=self.get_build_iterator() + self.returned_tasks=[] + try: + self.producer.start() + except KeyboardInterrupt: + self.store() + raise + else: + if self.producer.dirty: + self.store() + if self.producer.error: + raise Errors.BuildError(self.producer.error) + def setup(self,tool,tooldir=None,funs=None): + if isinstance(tool,list): + for i in tool:self.setup(i,tooldir) + return + module=Context.load_tool(tool,tooldir) + if hasattr(module,"setup"):module.setup(self) + def get_env(self): + try: + return self.all_envs[self.variant] + except KeyError: + return self.all_envs[''] + def set_env(self,val): + self.all_envs[self.variant]=val + env=property(get_env,set_env) + def add_manual_dependency(self,path,value): + if path is None: + raise ValueError('Invalid input') + if isinstance(path,waflib.Node.Node): + node=path + elif os.path.isabs(path): + node=self.root.find_resource(path) + else: + node=self.path.find_resource(path) + if isinstance(value,list): + self.deps_man[id(node)].extend(value) + else: + self.deps_man[id(node)].append(value) + def launch_node(self): + try: + return self.p_ln + except AttributeError: + self.p_ln=self.root.find_dir(self.launch_dir) + return self.p_ln + def hash_env_vars(self,env,vars_lst): + if not env.table: + env=env.parent + if not env: + return Utils.SIG_NIL + idx=str(id(env))+str(vars_lst) + try: + cache=self.cache_env + except AttributeError: + cache=self.cache_env={} + else: + try: + return self.cache_env[idx] + except KeyError: + pass + lst=[env[a]for a in vars_lst] + ret=Utils.h_list(lst) + Logs.debug('envhash: %s %r',Utils.to_hex(ret),lst) + cache[idx]=ret + return ret + def get_tgen_by_name(self,name): + cache=self.task_gen_cache_names + if not cache: + for g in self.groups: + for tg in g: + try: + cache[tg.name]=tg + except AttributeError: + pass + try: + return cache[name] + except KeyError: + raise Errors.WafError('Could not find a task generator for the name %r'%name) + def progress_line(self,state,total,col1,col2): + n=len(str(total)) + Utils.rot_idx+=1 + ind=Utils.rot_chr[Utils.rot_idx%4] + pc=(100.*state)/total + eta=str(self.timer) + fs="[%%%dd/%%%dd][%%s%%2d%%%%%%s][%s]["%(n,n,ind) + left=fs%(state,total,col1,pc,col2) + right='][%s%s%s]'%(col1,eta,col2) + cols=Logs.get_term_cols()-len(left)-len(right)+2*len(col1)+2*len(col2) + if cols<7:cols=7 + 
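From a wscript, BuildContext.__call__ is what the familiar bld(...) task-generator syntax resolves to, and add_manual_dependency lets task signatures depend on values that are not files. A sketch with illustrative file names and version string:

    def build(bld):
        bld(features='c cprogram', source='main.c version.c', target='app')
        # Rebuild tasks reading version.c whenever the version string changes,
        # even if no source file was touched:
        bld.add_manual_dependency('version.c', '0.1.0')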
ratio=((cols*state)//total)-1 + bar=('='*ratio+'>').ljust(cols) + msg=Utils.indicator%(left,bar,right) + return msg + def declare_chain(self,*k,**kw): + return TaskGen.declare_chain(*k,**kw) + def pre_build(self): + for m in getattr(self,'pre_funs',[]): + m(self) + def post_build(self): + for m in getattr(self,'post_funs',[]): + m(self) + def add_pre_fun(self,meth): + try: + self.pre_funs.append(meth) + except AttributeError: + self.pre_funs=[meth] + def add_post_fun(self,meth): + try: + self.post_funs.append(meth) + except AttributeError: + self.post_funs=[meth] + def get_group(self,x): + if not self.groups: + self.add_group() + if x is None: + return self.groups[self.current_group] + if x in self.group_names: + return self.group_names[x] + return self.groups[x] + def add_to_group(self,tgen,group=None): + assert(isinstance(tgen,TaskGen.task_gen)or isinstance(tgen,Task.TaskBase)) + tgen.bld=self + self.get_group(group).append(tgen) + def get_group_name(self,g): + if not isinstance(g,list): + g=self.groups[g] + for x in self.group_names: + if id(self.group_names[x])==id(g): + return x + return'' + def get_group_idx(self,tg): + se=id(tg) + for i in range(len(self.groups)): + for t in self.groups[i]: + if id(t)==se: + return i + return None + def add_group(self,name=None,move=True): + if name and name in self.group_names: + Logs.error('add_group: name %s already present'%name) + g=[] + self.group_names[name]=g + self.groups.append(g) + if move: + self.current_group=len(self.groups)-1 + def set_group(self,idx): + if isinstance(idx,str): + g=self.group_names[idx] + for i in range(len(self.groups)): + if id(g)==id(self.groups[i]): + self.current_group=i + else: + self.current_group=idx + def total(self): + total=0 + for group in self.groups: + for tg in group: + try: + total+=len(tg.tasks) + except AttributeError: + total+=1 + return total + def get_targets(self): + to_post=[] + min_grp=0 + for name in self.targets.split(','): + tg=self.get_tgen_by_name(name) + if not tg: + raise Errors.WafError('target %r does not exist'%name) + m=self.get_group_idx(tg) + if m>min_grp: + min_grp=m + to_post=[tg] + elif m==min_grp: + to_post.append(tg) + return(min_grp,to_post) + def get_all_task_gen(self): + lst=[] + for g in self.groups: + lst.extend(g) + return lst + def post_group(self): + if self.targets=='*': + for tg in self.groups[self.cur]: + try: + f=tg.post + except AttributeError: + pass + else: + f() + elif self.targets: + if self.cur<self._min_grp: + for tg in self.groups[self.cur]: + try: + f=tg.post + except AttributeError: + pass + else: + f() + else: + for tg in self._exact_tg: + tg.post() + else: + ln=self.launch_node() + if ln.is_child_of(self.bldnode): + Logs.warn('Building from the build directory, forcing --targets=*') + ln=self.srcnode + elif not ln.is_child_of(self.srcnode): + Logs.warn('CWD %s is not under %s, forcing --targets=* (run distclean?)'%(ln.abspath(),self.srcnode.abspath())) + ln=self.srcnode + for tg in self.groups[self.cur]: + try: + f=tg.post + except AttributeError: + pass + else: + if tg.path.is_child_of(ln): + f() + def get_tasks_group(self,idx): + tasks=[] + for tg in self.groups[idx]: + try: + tasks.extend(tg.tasks) + except AttributeError: + tasks.append(tg) + return tasks + def get_build_iterator(self): + self.cur=0 + if self.targets and self.targets!='*': + (self._min_grp,self._exact_tg)=self.get_targets() + global lazy_post + if self.post_mode!=POST_LAZY: + while self.cur<len(self.groups): + self.post_group() + self.cur+=1 + self.cur=0 + while 
self.cur<len(self.groups): + if self.post_mode!=POST_AT_ONCE: + self.post_group() + tasks=self.get_tasks_group(self.cur) + Task.set_file_constraints(tasks) + Task.set_precedence_constraints(tasks) + self.cur_tasks=tasks + self.cur+=1 + if not tasks: + continue + yield tasks + while 1: + yield[] +class inst(Task.Task): + color='CYAN' + def uid(self): + lst=[self.dest,self.path]+self.source + return Utils.h_list(repr(lst)) + def post(self): + buf=[] + for x in self.source: + if isinstance(x,waflib.Node.Node): + y=x + else: + y=self.path.find_resource(x) + if not y: + if Logs.verbose: + Logs.warn('Could not find %s immediately (may cause broken builds)'%x) + idx=self.generator.bld.get_group_idx(self) + for tg in self.generator.bld.groups[idx]: + if not isinstance(tg,inst)and id(tg)!=id(self): + tg.post() + y=self.path.find_resource(x) + if y: + break + else: + raise Errors.WafError('Could not find %r in %r'%(x,self.path)) + buf.append(y) + self.inputs=buf + def runnable_status(self): + ret=super(inst,self).runnable_status() + if ret==Task.SKIP_ME: + return Task.RUN_ME + return ret + def __str__(self): + return'' + def run(self): + return self.generator.exec_task() + def get_install_path(self,destdir=True): + dest=Utils.subst_vars(self.dest,self.env) + dest=dest.replace('/',os.sep) + if destdir and Options.options.destdir: + dest=os.path.join(Options.options.destdir,os.path.splitdrive(dest)[1].lstrip(os.sep)) + return dest + def exec_install_files(self): + destpath=self.get_install_path() + if not destpath: + raise Errors.WafError('unknown installation path %r'%self.generator) + for x,y in zip(self.source,self.inputs): + if self.relative_trick: + destfile=os.path.join(destpath,y.path_from(self.path)) + else: + destfile=os.path.join(destpath,y.name) + self.generator.bld.do_install(y.abspath(),destfile,self.chmod) + def exec_install_as(self): + destfile=self.get_install_path() + self.generator.bld.do_install(self.inputs[0].abspath(),destfile,self.chmod) + def exec_symlink_as(self): + destfile=self.get_install_path() + src=self.link + if self.relative_trick: + src=os.path.relpath(src,os.path.dirname(destfile)) + self.generator.bld.do_link(src,destfile) +class InstallContext(BuildContext): + '''installs the targets on the system''' + cmd='install' + def __init__(self,**kw): + super(InstallContext,self).__init__(**kw) + self.uninstall=[] + self.is_install=INSTALL + def do_install(self,src,tgt,chmod=Utils.O644): + d,_=os.path.split(tgt) + if not d: + raise Errors.WafError('Invalid installation given %r->%r'%(src,tgt)) + Utils.check_dir(d) + srclbl=src.replace(self.srcnode.abspath()+os.sep,'') + if not Options.options.force: + try: + st1=os.stat(tgt) + st2=os.stat(src) + except OSError: + pass + else: + if st1.st_mtime+2>=st2.st_mtime and st1.st_size==st2.st_size: + if not self.progress_bar: + Logs.info('- install %s (from %s)'%(tgt,srclbl)) + return False + if not self.progress_bar: + Logs.info('+ install %s (from %s)'%(tgt,srclbl)) + try: + os.remove(tgt) + except OSError: + pass + try: + shutil.copy2(src,tgt) + os.chmod(tgt,chmod) + except IOError: + try: + os.stat(src) + except(OSError,IOError): + Logs.error('File %r does not exist'%src) + raise Errors.WafError('Could not install the file %r'%tgt) + def do_link(self,src,tgt): + d,_=os.path.split(tgt) + Utils.check_dir(d) + link=False + if not os.path.islink(tgt): + link=True + elif os.readlink(tgt)!=src: + link=True + if link: + try:os.remove(tgt) + except OSError:pass + if not self.progress_bar: + Logs.info('+ symlink %s (to %s)'%(tgt,src)) + 
os.symlink(src,tgt) + else: + if not self.progress_bar: + Logs.info('- symlink %s (to %s)'%(tgt,src)) + def run_task_now(self,tsk,postpone): + tsk.post() + if not postpone: + if tsk.runnable_status()==Task.ASK_LATER: + raise self.WafError('cannot post the task %r'%tsk) + tsk.run() + def install_files(self,dest,files,env=None,chmod=Utils.O644,relative_trick=False,cwd=None,add=True,postpone=True): + tsk=inst(env=env or self.env) + tsk.bld=self + tsk.path=cwd or self.path + tsk.chmod=chmod + if isinstance(files,waflib.Node.Node): + tsk.source=[files] + else: + tsk.source=Utils.to_list(files) + tsk.dest=dest + tsk.exec_task=tsk.exec_install_files + tsk.relative_trick=relative_trick + if add:self.add_to_group(tsk) + self.run_task_now(tsk,postpone) + return tsk + def install_as(self,dest,srcfile,env=None,chmod=Utils.O644,cwd=None,add=True,postpone=True): + tsk=inst(env=env or self.env) + tsk.bld=self + tsk.path=cwd or self.path + tsk.chmod=chmod + tsk.source=[srcfile] + tsk.dest=dest + tsk.exec_task=tsk.exec_install_as + if add:self.add_to_group(tsk) + self.run_task_now(tsk,postpone) + return tsk + def symlink_as(self,dest,src,env=None,cwd=None,add=True,postpone=True,relative_trick=False): + if Utils.is_win32: + return + tsk=inst(env=env or self.env) + tsk.bld=self + tsk.dest=dest + tsk.path=cwd or self.path + tsk.source=[] + tsk.link=src + tsk.relative_trick=relative_trick + tsk.exec_task=tsk.exec_symlink_as + if add:self.add_to_group(tsk) + self.run_task_now(tsk,postpone) + return tsk +class UninstallContext(InstallContext): + '''removes the targets installed''' + cmd='uninstall' + def __init__(self,**kw): + super(UninstallContext,self).__init__(**kw) + self.is_install=UNINSTALL + def do_install(self,src,tgt,chmod=Utils.O644): + if not self.progress_bar: + Logs.info('- remove %s'%tgt) + self.uninstall.append(tgt) + try: + os.remove(tgt) + except OSError as e: + if e.errno!=errno.ENOENT: + if not getattr(self,'uninstall_error',None): + self.uninstall_error=True + Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)') + if Logs.verbose>1: + Logs.warn('Could not remove %s (error code %r)'%(e.filename,e.errno)) + while tgt: + tgt=os.path.dirname(tgt) + try: + os.rmdir(tgt) + except OSError: + break + def do_link(self,src,tgt): + try: + if not self.progress_bar: + Logs.info('- remove %s'%tgt) + os.remove(tgt) + except OSError: + pass + while tgt: + tgt=os.path.dirname(tgt) + try: + os.rmdir(tgt) + except OSError: + break + def execute(self): + try: + def runnable_status(self): + return Task.SKIP_ME + setattr(Task.Task,'runnable_status_back',Task.Task.runnable_status) + setattr(Task.Task,'runnable_status',runnable_status) + super(UninstallContext,self).execute() + finally: + setattr(Task.Task,'runnable_status',Task.Task.runnable_status_back) +class CleanContext(BuildContext): + '''cleans the project''' + cmd='clean' + def execute(self): + self.restore() + if not self.all_envs: + self.load_envs() + self.recurse([self.run_dir]) + try: + self.clean() + finally: + self.store() + def clean(self): + Logs.debug('build: clean called') + if self.bldnode!=self.srcnode: + lst=[] + for e in self.all_envs.values(): + lst.extend(self.root.find_or_declare(f)for f in e[CFG_FILES]) + for n in self.bldnode.ant_glob('**/*',excl='.lock* *conf_check_*/** config.log c4che/*',quiet=True): + if n in lst: + continue + n.delete() + self.root.children={} + for v in'node_deps task_sigs raw_deps'.split(): + setattr(self,v,{}) +class ListContext(BuildContext): + '''lists the targets to execute''' + 
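On a plain BuildContext the install_files/install_as/symlink_as methods are deliberately no-ops; the InstallContext overrides above do the real work during 'waf install'. A sketch of how they are typically called from a wscript build() function, with illustrative paths:

    def build(bld):
        bld.install_files('${INCLUDEDIR}/example',
                          bld.path.ant_glob('include/**/*.h'),
                          relative_trick=True)       # preserve the include/ sub-tree layout
        bld.install_as('${BINDIR}/example-tool', 'tools/example-tool.py', chmod=0o755)
        bld.symlink_as('${LIBDIR}/libexample.so', 'libexample.so.0')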
cmd='list' + def execute(self): + self.restore() + if not self.all_envs: + self.load_envs() + self.recurse([self.run_dir]) + self.pre_build() + self.timer=Utils.Timer() + for g in self.groups: + for tg in g: + try: + f=tg.post + except AttributeError: + pass + else: + f() + try: + self.get_tgen_by_name('') + except Exception: + pass + lst=list(self.task_gen_cache_names.keys()) + lst.sort() + for k in lst: + Logs.pprint('GREEN',k) +class StepContext(BuildContext): + '''executes tasks in a step-by-step fashion, for debugging''' + cmd='step' + def __init__(self,**kw): + super(StepContext,self).__init__(**kw) + self.files=Options.options.files + def compile(self): + if not self.files: + Logs.warn('Add a pattern for the debug build, for example "waf step --files=main.c,app"') + BuildContext.compile(self) + return + targets=None + if self.targets and self.targets!='*': + targets=self.targets.split(',') + for g in self.groups: + for tg in g: + if targets and tg.name not in targets: + continue + try: + f=tg.post + except AttributeError: + pass + else: + f() + for pat in self.files.split(','): + matcher=self.get_matcher(pat) + for tg in g: + if isinstance(tg,Task.TaskBase): + lst=[tg] + else: + lst=tg.tasks + for tsk in lst: + do_exec=False + for node in getattr(tsk,'inputs',[]): + if matcher(node,output=False): + do_exec=True + break + for node in getattr(tsk,'outputs',[]): + if matcher(node,output=True): + do_exec=True + break + if do_exec: + ret=tsk.run() + Logs.info('%s -> exit %r'%(str(tsk),ret)) + def get_matcher(self,pat): + inn=True + out=True + if pat.startswith('in:'): + out=False + pat=pat.replace('in:','') + elif pat.startswith('out:'): + inn=False + pat=pat.replace('out:','') + anode=self.root.find_node(pat) + pattern=None + if not anode: + if not pat.startswith('^'): + pat='^.+?%s'%pat + if not pat.endswith('$'): + pat='%s$'%pat + pattern=re.compile(pat) + def match(node,output): + if output==True and not out: + return False + if output==False and not inn: + return False + if anode: + return anode==node + else: + return pattern.match(node.abspath()) + return match +BuildContext.store=Utils.nogc(BuildContext.store) +BuildContext.restore=Utils.nogc(BuildContext.restore) diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/ConfigSet.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/ConfigSet.py new file mode 100644 index 0000000..f9fdcc7 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/ConfigSet.py @@ -0,0 +1,152 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import copy,re,os +from waflib import Logs,Utils +re_imp=re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$',re.M) +class ConfigSet(object): + __slots__=('table','parent') + def __init__(self,filename=None): + self.table={} + if filename: + self.load(filename) + def __contains__(self,key): + if key in self.table:return True + try:return self.parent.__contains__(key) + except AttributeError:return False + def keys(self): + keys=set() + cur=self + while cur: + keys.update(cur.table.keys()) + cur=getattr(cur,'parent',None) + keys=list(keys) + keys.sort() + return keys + def __str__(self): + return"\n".join(["%r %r"%(x,self.__getitem__(x))for x in self.keys()]) + def __getitem__(self,key): + try: + while 1: + x=self.table.get(key,None) + if not x is None: + return x + self=self.parent + except AttributeError: + return[] + def __setitem__(self,key,value): + self.table[key]=value + def __delitem__(self,key): + self[key]=[] + def __getattr__(self,name): + if name in self.__slots__: + return object.__getattr__(self,name) + else: + return self[name] + def __setattr__(self,name,value): + if name in self.__slots__: + object.__setattr__(self,name,value) + else: + self[name]=value + def __delattr__(self,name): + if name in self.__slots__: + object.__delattr__(self,name) + else: + del self[name] + def derive(self): + newenv=ConfigSet() + newenv.parent=self + return newenv + def detach(self): + tbl=self.get_merged_dict() + try: + delattr(self,'parent') + except AttributeError: + pass + else: + keys=tbl.keys() + for x in keys: + tbl[x]=copy.deepcopy(tbl[x]) + self.table=tbl + def get_flat(self,key): + s=self[key] + if isinstance(s,str):return s + return' '.join(s) + def _get_list_value_for_modification(self,key): + try: + value=self.table[key] + except KeyError: + try:value=self.parent[key] + except AttributeError:value=[] + if isinstance(value,list): + value=value[:] + else: + value=[value] + else: + if not isinstance(value,list): + value=[value] + self.table[key]=value + return value + def append_value(self,var,val): + current_value=self._get_list_value_for_modification(var) + if isinstance(val,str): + val=[val] + current_value.extend(val) + def prepend_value(self,var,val): + if isinstance(val,str): + val=[val] + self.table[var]=val+self._get_list_value_for_modification(var) + def append_unique(self,var,val): + if isinstance(val,str): + val=[val] + current_value=self._get_list_value_for_modification(var) + for x in val: + if x not in current_value: + current_value.append(x) + def get_merged_dict(self): + table_list=[] + env=self + while 1: + table_list.insert(0,env.table) + try:env=env.parent + except AttributeError:break + merged_table={} + for table in table_list: + merged_table.update(table) + return merged_table + def store(self,filename): + try: + os.makedirs(os.path.split(filename)[0]) + except OSError: + pass + buf=[] + merged_table=self.get_merged_dict() + keys=list(merged_table.keys()) + keys.sort() + try: + fun=ascii + except NameError: + fun=repr + for k in keys: + if k!='undo_stack': + buf.append('%s = %s\n'%(k,fun(merged_table[k]))) + Utils.writef(filename,''.join(buf)) + def load(self,filename): + tbl=self.table + code=Utils.readf(filename,m='rU') + for m in re_imp.finditer(code): + g=m.group + tbl[g(2)]=eval(g(3)) + Logs.debug('env: %s'%str(self.table)) + def update(self,d): + for k,v in d.items(): + self[k]=v + def stash(self): + orig=self.table + tbl=self.table=self.table.copy() + for x in tbl.keys(): + 
tbl[x]=copy.deepcopy(tbl[x]) + self.undo_stack=self.undo_stack+[orig] + def revert(self): + self.table=self.undo_stack.pop(-1) diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Configure.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Configure.py new file mode 100644 index 0000000..6545c87 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Configure.py @@ -0,0 +1,317 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,shlex,sys,time +from waflib import ConfigSet,Utils,Options,Logs,Context,Build,Errors +try: + from urllib import request +except ImportError: + from urllib import urlopen +else: + urlopen=request.urlopen +BREAK='break' +CONTINUE='continue' +WAF_CONFIG_LOG='config.log' +autoconfig=False +conf_template='''# project %(app)s configured on %(now)s by +# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s) +# using %(args)s +#''' +def download_check(node): + pass +def download_tool(tool,force=False,ctx=None): + for x in Utils.to_list(Context.remote_repo): + for sub in Utils.to_list(Context.remote_locs): + url='/'.join((x,sub,tool+'.py')) + try: + web=urlopen(url) + try: + if web.getcode()!=200: + continue + except AttributeError: + pass + except Exception: + continue + else: + tmp=ctx.root.make_node(os.sep.join((Context.waf_dir,'waflib','extras',tool+'.py'))) + tmp.write(web.read(),'wb') + Logs.warn('Downloaded %s from %s'%(tool,url)) + download_check(tmp) + try: + module=Context.load_tool(tool) + except Exception: + Logs.warn('The tool %s from %s is unusable'%(tool,url)) + try: + tmp.delete() + except Exception: + pass + continue + return module + raise Errors.WafError('Could not load the Waf tool') +class ConfigurationContext(Context.Context): + '''configures the project''' + cmd='configure' + error_handlers=[] + def __init__(self,**kw): + super(ConfigurationContext,self).__init__(**kw) + self.environ=dict(os.environ) + self.all_envs={} + self.top_dir=None + self.out_dir=None + self.tools=[] + self.hash=0 + self.files=[] + self.tool_cache=[] + self.setenv('') + def setenv(self,name,env=None): + if name not in self.all_envs or env: + if not env: + env=ConfigSet.ConfigSet() + self.prepare_env(env) + else: + env=env.derive() + self.all_envs[name]=env + self.variant=name + def get_env(self): + return self.all_envs[self.variant] + def set_env(self,val): + self.all_envs[self.variant]=val + env=property(get_env,set_env) + def init_dirs(self): + top=self.top_dir + if not top: + top=Options.options.top + if not top: + top=getattr(Context.g_module,Context.TOP,None) + if not top: + top=self.path.abspath() + top=os.path.abspath(top) + self.srcnode=(os.path.isabs(top)and self.root or self.path).find_dir(top) + assert(self.srcnode) + out=self.out_dir + if not out: + out=Options.options.out + if not out: + out=getattr(Context.g_module,Context.OUT,None) + if not out: + out=Options.lockfile.replace('.lock-waf_%s_'%sys.platform,'').replace('.lock-waf','') + self.bldnode=(os.path.isabs(out)and self.root or self.path).make_node(out) + self.bldnode.mkdir() + if not os.path.isdir(self.bldnode.abspath()): + conf.fatal('Could not create the build directory %s'%self.bldnode.abspath()) + def execute(self): + self.init_dirs() + self.cachedir=self.bldnode.make_node(Build.CACHE_DIR) + self.cachedir.mkdir() + path=os.path.join(self.bldnode.abspath(),WAF_CONFIG_LOG) + self.logger=Logs.make_logger(path,'cfg') + 
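The ConfigSet class above behaves like a layered dictionary: derived sets read through to their parent but write only into their own table, and stash()/revert() give a cheap snapshot/rollback. A standalone illustration of those semantics, assuming waflib is importable (e.g. from within a wscript); it is not code from this repository:

    from waflib import ConfigSet

    env = ConfigSet.ConfigSet()
    env.CFLAGS = ['-O2']
    child = env.derive()                    # child reads through to the parent...
    child.append_value('CFLAGS', ['-g'])    # ...but writes only into its own table
    assert env.CFLAGS == ['-O2']
    assert child.CFLAGS == ['-O2', '-g']

    env.stash()                             # snapshot the current table
    env.DEFINES = ['TRY_FEATURE']
    env.revert()                            # roll back to the snapshot
    assert 'TRY_FEATURE' not in env.DEFINES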
app=getattr(Context.g_module,'APPNAME','') + if app: + ver=getattr(Context.g_module,'VERSION','') + if ver: + app="%s (%s)"%(app,ver) + now=time.ctime() + pyver=sys.hexversion + systype=sys.platform + args=" ".join(sys.argv) + wafver=Context.WAFVERSION + abi=Context.ABI + self.to_log(conf_template%vars()) + self.msg('Setting top to',self.srcnode.abspath()) + self.msg('Setting out to',self.bldnode.abspath()) + if id(self.srcnode)==id(self.bldnode): + Logs.warn('Setting top == out (remember to use "update_outputs")') + elif id(self.path)!=id(self.srcnode): + if self.srcnode.is_child_of(self.path): + Logs.warn('Are you certain that you do not want to set top="." ?') + super(ConfigurationContext,self).execute() + self.store() + Context.top_dir=self.srcnode.abspath() + Context.out_dir=self.bldnode.abspath() + env=ConfigSet.ConfigSet() + env['argv']=sys.argv + env['options']=Options.options.__dict__ + env.run_dir=Context.run_dir + env.top_dir=Context.top_dir + env.out_dir=Context.out_dir + env['hash']=self.hash + env['files']=self.files + env['environ']=dict(self.environ) + if not self.env.NO_LOCK_IN_RUN: + env.store(Context.run_dir+os.sep+Options.lockfile) + if not self.env.NO_LOCK_IN_TOP: + env.store(Context.top_dir+os.sep+Options.lockfile) + if not self.env.NO_LOCK_IN_OUT: + env.store(Context.out_dir+os.sep+Options.lockfile) + def prepare_env(self,env): + if not env.PREFIX: + if Options.options.prefix or Utils.is_win32: + env.PREFIX=os.path.abspath(os.path.expanduser(Options.options.prefix)) + else: + env.PREFIX='' + if not env.BINDIR: + env.BINDIR=Utils.subst_vars('${PREFIX}/bin',env) + if not env.LIBDIR: + env.LIBDIR=Utils.subst_vars('${PREFIX}/lib',env) + def store(self): + n=self.cachedir.make_node('build.config.py') + n.write('version = 0x%x\ntools = %r\n'%(Context.HEXVERSION,self.tools)) + if not self.all_envs: + self.fatal('nothing to store in the configuration context!') + for key in self.all_envs: + tmpenv=self.all_envs[key] + tmpenv.store(os.path.join(self.cachedir.abspath(),key+Build.CACHE_SUFFIX)) + def load(self,input,tooldir=None,funs=None,download=True): + tools=Utils.to_list(input) + if tooldir:tooldir=Utils.to_list(tooldir) + for tool in tools: + mag=(tool,id(self.env),funs) + if mag in self.tool_cache: + self.to_log('(tool %s is already loaded, skipping)'%tool) + continue + self.tool_cache.append(mag) + module=None + try: + module=Context.load_tool(tool,tooldir) + except ImportError as e: + if Options.options.download: + module=download_tool(tool,ctx=self) + if not module: + self.fatal('Could not load the Waf tool %r or download a suitable replacement from the repository (sys.path %r)\n%s'%(tool,sys.path,e)) + else: + self.fatal('Could not load the Waf tool %r from %r (try the --download option?):\n%s'%(tool,sys.path,e)) + except Exception as e: + self.to_log('imp %r (%r & %r)'%(tool,tooldir,funs)) + self.to_log(Utils.ex_stack()) + raise + if funs is not None: + self.eval_rules(funs) + else: + func=getattr(module,'configure',None) + if func: + if type(func)is type(Utils.readf):func(self) + else:self.eval_rules(func) + self.tools.append({'tool':tool,'tooldir':tooldir,'funs':funs}) + def post_recurse(self,node): + super(ConfigurationContext,self).post_recurse(node) + self.hash=Utils.h_list((self.hash,node.read('rb'))) + self.files.append(node.abspath()) + def eval_rules(self,rules): + self.rules=Utils.to_list(rules) + for x in self.rules: + f=getattr(self,x) + if not f:self.fatal("No such method '%s'."%x) + try: + f() + except Exception as e: + ret=self.err_handler(x,e) + if 
ret==BREAK: + break + elif ret==CONTINUE: + continue + else: + raise + def err_handler(self,fun,error): + pass +def conf(f): + def fun(*k,**kw): + mandatory=True + if'mandatory'in kw: + mandatory=kw['mandatory'] + del kw['mandatory'] + try: + return f(*k,**kw) + except Errors.ConfigurationError: + if mandatory: + raise + setattr(ConfigurationContext,f.__name__,fun) + setattr(Build.BuildContext,f.__name__,fun) + return f +@conf +def add_os_flags(self,var,dest=None): + try:self.env.append_value(dest or var,shlex.split(self.environ[var])) + except KeyError:pass +@conf +def cmd_to_list(self,cmd): + if isinstance(cmd,str)and cmd.find(' '): + try: + os.stat(cmd) + except OSError: + return shlex.split(cmd) + else: + return[cmd] + return cmd +@conf +def check_waf_version(self,mini='1.6.99',maxi='1.8.0'): + self.start_msg('Checking for waf version in %s-%s'%(str(mini),str(maxi))) + ver=Context.HEXVERSION + if Utils.num2ver(mini)>ver: + self.fatal('waf version should be at least %r (%r found)'%(Utils.num2ver(mini),ver)) + if Utils.num2ver(maxi)<ver: + self.fatal('waf version should be at most %r (%r found)'%(Utils.num2ver(maxi),ver)) + self.end_msg('ok') +@conf +def find_file(self,filename,path_list=[]): + for n in Utils.to_list(filename): + for d in Utils.to_list(path_list): + p=os.path.join(d,n) + if os.path.exists(p): + return p + self.fatal('Could not find %r'%filename) +@conf +def find_program(self,filename,**kw): + exts=kw.get('exts',Utils.is_win32 and'.exe,.com,.bat,.cmd'or',.sh,.pl,.py') + environ=kw.get('environ',os.environ) + ret='' + filename=Utils.to_list(filename) + var=kw.get('var','') + if not var: + var=filename[0].upper() + if self.env[var]: + ret=self.env[var] + elif var in environ: + ret=environ[var] + path_list=kw.get('path_list','') + if not ret: + if path_list: + path_list=Utils.to_list(path_list) + else: + path_list=environ.get('PATH','').split(os.pathsep) + if not isinstance(filename,list): + filename=[filename] + for a in exts.split(','): + if ret: + break + for b in filename: + if ret: + break + for c in path_list: + if ret: + break + x=os.path.expanduser(os.path.join(c,b+a)) + if os.path.isfile(x): + ret=x + if not ret and Utils.winreg: + ret=Utils.get_registry_app_path(Utils.winreg.HKEY_CURRENT_USER,filename) + if not ret and Utils.winreg: + ret=Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE,filename) + self.msg('Checking for program '+','.join(filename),ret or False) + self.to_log('find program=%r paths=%r var=%r -> %r'%(filename,path_list,var,ret)) + if not ret: + self.fatal(kw.get('errmsg','')or'Could not find the program %s'%','.join(filename)) + if var: + self.env[var]=ret + return ret +@conf +def find_perl_program(self,filename,path_list=[],var=None,environ=None,exts=''): + try: + app=self.find_program(filename,path_list=path_list,var=var,environ=environ,exts=exts) + except Exception: + self.find_program('perl',var='PERL') + app=self.find_file(filename,os.environ['PATH'].split(os.pathsep)) + if not app: + raise + if var: + self.env[var]=Utils.to_list(self.env['PERL'])+[app] + self.msg('Checking for %r'%filename,app) diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Context.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Context.py new file mode 100644 index 0000000..cc163f3 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Context.py @@ -0,0 +1,316 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
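# Editorial sketch, not part of the commit: how the ConfigurationContext and
# the conf() decorator from waflib/Configure.py above are driven from a project
# wscript. 'compiler_c' is a standard waf tool; the custom check is a
# placeholder and only illustrates that @conf methods become available on the
# context (and honour the mandatory=False handling in the wrapper).
from waflib.Configure import conf

@conf
def check_build_env(ctx):
    ctx.check_waf_version(mini='1.6.99', maxi='1.8.0')

def options(opt):
    opt.load('compiler_c')

def configure(conf):
    conf.load('compiler_c')
    conf.find_program('perl', var='PERL', mandatory=False)
    conf.check_build_env()
    conf.env.append_value('CFLAGS', ['-Wall'])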
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,imp,sys +from waflib import Utils,Errors,Logs +import waflib.Node +HEXVERSION=0x1071000 +WAFVERSION="1.7.16" +WAFREVISION="73c1705078f8c9c51a33e20f221a309d5a94b5e1" +ABI=98 +DBFILE='.wafpickle-%s-%d-%d'%(sys.platform,sys.hexversion,ABI) +APPNAME='APPNAME' +VERSION='VERSION' +TOP='top' +OUT='out' +WSCRIPT_FILE='wscript' +launch_dir='' +run_dir='' +top_dir='' +out_dir='' +waf_dir='' +local_repo='' +remote_repo='http://waf.googlecode.com/git/' +remote_locs=['waflib/extras','waflib/Tools'] +g_module=None +STDOUT=1 +STDERR=-1 +BOTH=0 +classes=[] +def create_context(cmd_name,*k,**kw): + global classes + for x in classes: + if x.cmd==cmd_name: + return x(*k,**kw) + ctx=Context(*k,**kw) + ctx.fun=cmd_name + return ctx +class store_context(type): + def __init__(cls,name,bases,dict): + super(store_context,cls).__init__(name,bases,dict) + name=cls.__name__ + if name=='ctx'or name=='Context': + return + try: + cls.cmd + except AttributeError: + raise Errors.WafError('Missing command for the context class %r (cmd)'%name) + if not getattr(cls,'fun',None): + cls.fun=cls.cmd + global classes + classes.insert(0,cls) +ctx=store_context('ctx',(object,),{}) +class Context(ctx): + errors=Errors + tools={} + def __init__(self,**kw): + try: + rd=kw['run_dir'] + except KeyError: + global run_dir + rd=run_dir + self.node_class=type("Nod3",(waflib.Node.Node,),{}) + self.node_class.__module__="waflib.Node" + self.node_class.ctx=self + self.root=self.node_class('',None) + self.cur_script=None + self.path=self.root.find_dir(rd) + self.stack_path=[] + self.exec_dict={'ctx':self,'conf':self,'bld':self,'opt':self} + self.logger=None + def __hash__(self): + return id(self) + def load(self,tool_list,*k,**kw): + tools=Utils.to_list(tool_list) + path=Utils.to_list(kw.get('tooldir','')) + for t in tools: + module=load_tool(t,path) + fun=getattr(module,kw.get('name',self.fun),None) + if fun: + fun(self) + def execute(self): + global g_module + self.recurse([os.path.dirname(g_module.root_path)]) + def pre_recurse(self,node): + self.stack_path.append(self.cur_script) + self.cur_script=node + self.path=node.parent + def post_recurse(self,node): + self.cur_script=self.stack_path.pop() + if self.cur_script: + self.path=self.cur_script.parent + def recurse(self,dirs,name=None,mandatory=True,once=True): + try: + cache=self.recurse_cache + except AttributeError: + cache=self.recurse_cache={} + for d in Utils.to_list(dirs): + if not os.path.isabs(d): + d=os.path.join(self.path.abspath(),d) + WSCRIPT=os.path.join(d,WSCRIPT_FILE) + WSCRIPT_FUN=WSCRIPT+'_'+(name or self.fun) + node=self.root.find_node(WSCRIPT_FUN) + if node and(not once or node not in cache): + cache[node]=True + self.pre_recurse(node) + try: + function_code=node.read('rU') + exec(compile(function_code,node.abspath(),'exec'),self.exec_dict) + finally: + self.post_recurse(node) + elif not node: + node=self.root.find_node(WSCRIPT) + tup=(node,name or self.fun) + if node and(not once or tup not in cache): + cache[tup]=True + self.pre_recurse(node) + try: + wscript_module=load_module(node.abspath()) + user_function=getattr(wscript_module,(name or self.fun),None) + if not user_function: + if not mandatory: + continue + raise Errors.WafError('No function %s defined in %s'%(name or self.fun,node.abspath())) + user_function(self) + finally: + self.post_recurse(node) + elif not node: + if not mandatory: + continue + raise Errors.WafError('No wscript file in directory %s'%d) + def 
exec_command(self,cmd,**kw): + subprocess=Utils.subprocess + kw['shell']=isinstance(cmd,str) + Logs.debug('runner: %r'%cmd) + Logs.debug('runner_env: kw=%s'%kw) + if self.logger: + self.logger.info(cmd) + if'stdout'not in kw: + kw['stdout']=subprocess.PIPE + if'stderr'not in kw: + kw['stderr']=subprocess.PIPE + try: + if kw['stdout']or kw['stderr']: + p=subprocess.Popen(cmd,**kw) + (out,err)=p.communicate() + ret=p.returncode + else: + out,err=(None,None) + ret=subprocess.Popen(cmd,**kw).wait() + except Exception as e: + raise Errors.WafError('Execution failure: %s'%str(e),ex=e) + if out: + if not isinstance(out,str): + out=out.decode(sys.stdout.encoding or'iso8859-1') + if self.logger: + self.logger.debug('out: %s'%out) + else: + sys.stdout.write(out) + if err: + if not isinstance(err,str): + err=err.decode(sys.stdout.encoding or'iso8859-1') + if self.logger: + self.logger.error('err: %s'%err) + else: + sys.stderr.write(err) + return ret + def cmd_and_log(self,cmd,**kw): + subprocess=Utils.subprocess + kw['shell']=isinstance(cmd,str) + Logs.debug('runner: %r'%cmd) + if'quiet'in kw: + quiet=kw['quiet'] + del kw['quiet'] + else: + quiet=None + if'output'in kw: + to_ret=kw['output'] + del kw['output'] + else: + to_ret=STDOUT + kw['stdout']=kw['stderr']=subprocess.PIPE + if quiet is None: + self.to_log(cmd) + try: + p=subprocess.Popen(cmd,**kw) + (out,err)=p.communicate() + except Exception as e: + raise Errors.WafError('Execution failure: %s'%str(e),ex=e) + if not isinstance(out,str): + out=out.decode(sys.stdout.encoding or'iso8859-1') + if not isinstance(err,str): + err=err.decode(sys.stdout.encoding or'iso8859-1') + if out and quiet!=STDOUT and quiet!=BOTH: + self.to_log('out: %s'%out) + if err and quiet!=STDERR and quiet!=BOTH: + self.to_log('err: %s'%err) + if p.returncode: + e=Errors.WafError('Command %r returned %r'%(cmd,p.returncode)) + e.returncode=p.returncode + e.stderr=err + e.stdout=out + raise e + if to_ret==BOTH: + return(out,err) + elif to_ret==STDERR: + return err + return out + def fatal(self,msg,ex=None): + if self.logger: + self.logger.info('from %s: %s'%(self.path.abspath(),msg)) + try: + msg='%s\n(complete log in %s)'%(msg,self.logger.handlers[0].baseFilename) + except Exception: + pass + raise self.errors.ConfigurationError(msg,ex=ex) + def to_log(self,msg): + if not msg: + return + if self.logger: + self.logger.info(msg) + else: + sys.stderr.write(str(msg)) + sys.stderr.flush() + def msg(self,msg,result,color=None): + self.start_msg(msg) + if not isinstance(color,str): + color=result and'GREEN'or'YELLOW' + self.end_msg(result,color) + def start_msg(self,msg): + try: + if self.in_msg: + self.in_msg+=1 + return + except AttributeError: + self.in_msg=0 + self.in_msg+=1 + try: + self.line_just=max(self.line_just,len(msg)) + except AttributeError: + self.line_just=max(40,len(msg)) + for x in(self.line_just*'-',msg): + self.to_log(x) + Logs.pprint('NORMAL',"%s :"%msg.ljust(self.line_just),sep='') + def end_msg(self,result,color=None): + self.in_msg-=1 + if self.in_msg: + return + defcolor='GREEN' + if result==True: + msg='ok' + elif result==False: + msg='not found' + defcolor='YELLOW' + else: + msg=str(result) + self.to_log(msg) + Logs.pprint(color or defcolor,msg) + def load_special_tools(self,var,ban=[]): + global waf_dir + lst=self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var) + for x in lst: + if not x.name in ban: + load_tool(x.name.replace('.py','')) +cache_modules={} +def load_module(path): + try: + return cache_modules[path] + except KeyError: + 
pass + module=imp.new_module(WSCRIPT_FILE) + try: + code=Utils.readf(path,m='rU') + except(IOError,OSError): + raise Errors.WafError('Could not read the file %r'%path) + module_dir=os.path.dirname(path) + sys.path.insert(0,module_dir) + exec(compile(code,path,'exec'),module.__dict__) + sys.path.remove(module_dir) + cache_modules[path]=module + return module +def load_tool(tool,tooldir=None): + if tool=='java': + tool='javaw' + elif tool=='compiler_cc': + tool='compiler_c' + else: + tool=tool.replace('++','xx') + if tooldir: + assert isinstance(tooldir,list) + sys.path=tooldir+sys.path + try: + __import__(tool) + ret=sys.modules[tool] + Context.tools[tool]=ret + return ret + finally: + for d in tooldir: + sys.path.remove(d) + else: + global waf_dir + try: + os.stat(os.path.join(waf_dir,'waflib','extras',tool+'.py')) + except OSError: + try: + os.stat(os.path.join(waf_dir,'waflib','Tools',tool+'.py')) + except OSError: + d=tool + else: + d='waflib.Tools.%s'%tool + else: + d='waflib.extras.%s'%tool + __import__(d) + ret=sys.modules[d] + Context.tools[tool]=ret + return ret diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Errors.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Errors.py new file mode 100644 index 0000000..aacc1a9 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Errors.py @@ -0,0 +1,37 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import traceback,sys +class WafError(Exception): + def __init__(self,msg='',ex=None): + self.msg=msg + assert not isinstance(msg,Exception) + self.stack=[] + if ex: + if not msg: + self.msg=str(ex) + if isinstance(ex,WafError): + self.stack=ex.stack + else: + self.stack=traceback.extract_tb(sys.exc_info()[2]) + self.stack+=traceback.extract_stack()[:-1] + self.verbose_msg=''.join(traceback.format_list(self.stack)) + def __str__(self): + return str(self.msg) +class BuildError(WafError): + def __init__(self,error_tasks=[]): + self.tasks=error_tasks + WafError.__init__(self,self.format_error()) + def format_error(self): + lst=['Build failed'] + for tsk in self.tasks: + txt=tsk.format_error() + if txt:lst.append(txt) + return'\n'.join(lst) +class ConfigurationError(WafError): + pass +class TaskRescan(WafError): + pass +class TaskNotReady(WafError): + pass diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Logs.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Logs.py new file mode 100644 index 0000000..ef62452 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Logs.py @@ -0,0 +1,177 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
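# Editorial sketch, not part of the commit: adding a custom command on top of
# the Context machinery from waflib/Context.py above. The store_context
# metaclass registers the subclass under its 'cmd' attribute, so
# create_context('revision') -- i.e. "./waf revision" -- dispatches to it.
# The command name and the git invocation are illustrative only.
from waflib import Context, Errors

class RevisionContext(Context.Context):
    '''prints the current git revision'''
    cmd = 'revision'

    def execute(self):
        try:
            out = self.cmd_and_log(['git', 'rev-parse', 'HEAD'],
                                   quiet=Context.BOTH)
        except Errors.WafError as e:
            self.fatal('git rev-parse failed: %s' % e)
        print('revision: %s' % out.strip())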
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,re,traceback,sys +_nocolor=os.environ.get('NOCOLOR','no')not in('no','0','false') +try: + if not _nocolor: + import waflib.ansiterm +except ImportError: + pass +try: + import threading +except ImportError: + if not'JOBS'in os.environ: + os.environ['JOBS']='1' +else: + wlock=threading.Lock() + class sync_stream(object): + def __init__(self,stream): + self.stream=stream + self.encoding=self.stream.encoding + def write(self,txt): + try: + wlock.acquire() + self.stream.write(txt) + self.stream.flush() + finally: + wlock.release() + def fileno(self): + return self.stream.fileno() + def flush(self): + self.stream.flush() + def isatty(self): + return self.stream.isatty() + if not os.environ.get('NOSYNC',False): + if id(sys.stdout)==id(sys.__stdout__): + sys.stdout=sync_stream(sys.stdout) + sys.stderr=sync_stream(sys.stderr) +import logging +LOG_FORMAT="%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s" +HOUR_FORMAT="%H:%M:%S" +zones='' +verbose=0 +colors_lst={'USE':True,'BOLD':'\x1b[01;1m','RED':'\x1b[01;31m','GREEN':'\x1b[32m','YELLOW':'\x1b[33m','PINK':'\x1b[35m','BLUE':'\x1b[01;34m','CYAN':'\x1b[36m','NORMAL':'\x1b[0m','cursor_on':'\x1b[?25h','cursor_off':'\x1b[?25l',} +got_tty=not os.environ.get('TERM','dumb')in['dumb','emacs'] +if got_tty: + try: + got_tty=sys.stderr.isatty()and sys.stdout.isatty() + except AttributeError: + got_tty=False +if(not got_tty and os.environ.get('TERM','dumb')!='msys')or _nocolor: + colors_lst['USE']=False +def get_term_cols(): + return 80 +try: + import struct,fcntl,termios +except ImportError: + pass +else: + if got_tty: + def get_term_cols_real(): + dummy_lines,cols=struct.unpack("HHHH",fcntl.ioctl(sys.stderr.fileno(),termios.TIOCGWINSZ,struct.pack("HHHH",0,0,0,0)))[:2] + return cols + try: + get_term_cols_real() + except Exception: + pass + else: + get_term_cols=get_term_cols_real +get_term_cols.__doc__=""" + Get the console width in characters. 
+ + :return: the number of characters per line + :rtype: int + """ +def get_color(cl): + if not colors_lst['USE']:return'' + return colors_lst.get(cl,'') +class color_dict(object): + def __getattr__(self,a): + return get_color(a) + def __call__(self,a): + return get_color(a) +colors=color_dict() +re_log=re.compile(r'(\w+): (.*)',re.M) +class log_filter(logging.Filter): + def __init__(self,name=None): + pass + def filter(self,rec): + rec.c1=colors.PINK + rec.c2=colors.NORMAL + rec.zone=rec.module + if rec.levelno>=logging.INFO: + if rec.levelno>=logging.ERROR: + rec.c1=colors.RED + elif rec.levelno>=logging.WARNING: + rec.c1=colors.YELLOW + else: + rec.c1=colors.GREEN + return True + m=re_log.match(rec.msg) + if m: + rec.zone=m.group(1) + rec.msg=m.group(2) + if zones: + return getattr(rec,'zone','')in zones or'*'in zones + elif not verbose>2: + return False + return True +class formatter(logging.Formatter): + def __init__(self): + logging.Formatter.__init__(self,LOG_FORMAT,HOUR_FORMAT) + def format(self,rec): + if rec.levelno>=logging.WARNING or rec.levelno==logging.INFO: + try: + msg=rec.msg.decode('utf-8') + except Exception: + msg=rec.msg + return'%s%s%s'%(rec.c1,msg,rec.c2) + return logging.Formatter.format(self,rec) +log=None +def debug(*k,**kw): + if verbose: + k=list(k) + k[0]=k[0].replace('\n',' ') + global log + log.debug(*k,**kw) +def error(*k,**kw): + global log + log.error(*k,**kw) + if verbose>2: + st=traceback.extract_stack() + if st: + st=st[:-1] + buf=[] + for filename,lineno,name,line in st: + buf.append(' File "%s", line %d, in %s'%(filename,lineno,name)) + if line: + buf.append(' %s'%line.strip()) + if buf:log.error("\n".join(buf)) +def warn(*k,**kw): + global log + log.warn(*k,**kw) +def info(*k,**kw): + global log + log.info(*k,**kw) +def init_log(): + global log + log=logging.getLogger('waflib') + log.handlers=[] + log.filters=[] + hdlr=logging.StreamHandler() + hdlr.setFormatter(formatter()) + log.addHandler(hdlr) + log.addFilter(log_filter()) + log.setLevel(logging.DEBUG) +def make_logger(path,name): + logger=logging.getLogger(name) + hdlr=logging.FileHandler(path,'w') + formatter=logging.Formatter('%(message)s') + hdlr.setFormatter(formatter) + logger.addHandler(hdlr) + logger.setLevel(logging.DEBUG) + return logger +def make_mem_logger(name,to_log,size=10000): + from logging.handlers import MemoryHandler + logger=logging.getLogger(name) + hdlr=MemoryHandler(size,target=to_log) + formatter=logging.Formatter('%(message)s') + hdlr.setFormatter(formatter) + logger.addHandler(hdlr) + logger.memhandler=hdlr + logger.setLevel(logging.DEBUG) + return logger +def pprint(col,str,label='',sep='\n'): + sys.stderr.write("%s%s%s %s%s"%(colors(col),str,colors.NORMAL,label,sep)) diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Node.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Node.py new file mode 100644 index 0000000..edaf314 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Node.py @@ -0,0 +1,467 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
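# Editorial sketch, not part of the commit: the Logs helpers from
# waflib/Logs.py above in direct use. Colors are disabled automatically when
# NOCOLOR is set or the streams are not ttys; make_logger() is the same helper
# Configure.execute uses to produce config.log. The log path is a placeholder.
from waflib import Logs

Logs.init_log()
Logs.info('plain informational message')
Logs.warn('rendered in yellow on a capable terminal')
Logs.pprint('CYAN', 'generating documentation', label='(optional step)')

logger = Logs.make_logger('/tmp/demo.log', 'demo')
logger.info('this line only goes to /tmp/demo.log')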
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,re,sys,shutil +from waflib import Utils,Errors +exclude_regs=''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/BitKeeper +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzrignore +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/.arch-ids +**/{arch} +**/_darcs +**/_darcs/** +**/.intlcache +**/.DS_Store''' +def split_path(path): + return path.split('/') +def split_path_cygwin(path): + if path.startswith('//'): + ret=path.split('/')[2:] + ret[0]='/'+ret[0] + return ret + return path.split('/') +re_sp=re.compile('[/\\\\]') +def split_path_win32(path): + if path.startswith('\\\\'): + ret=re.split(re_sp,path)[2:] + ret[0]='\\'+ret[0] + return ret + return re.split(re_sp,path) +if sys.platform=='cygwin': + split_path=split_path_cygwin +elif Utils.is_win32: + split_path=split_path_win32 +class Node(object): + __slots__=('name','sig','children','parent','cache_abspath','cache_isdir','cache_sig') + def __init__(self,name,parent): + self.name=name + self.parent=parent + if parent: + if name in parent.children: + raise Errors.WafError('node %s exists in the parent files %r already'%(name,parent)) + parent.children[name]=self + def __setstate__(self,data): + self.name=data[0] + self.parent=data[1] + if data[2]is not None: + self.children=data[2] + if data[3]is not None: + self.sig=data[3] + def __getstate__(self): + return(self.name,self.parent,getattr(self,'children',None),getattr(self,'sig',None)) + def __str__(self): + return self.name + def __repr__(self): + return self.abspath() + def __hash__(self): + return id(self) + def __eq__(self,node): + return id(self)==id(node) + def __copy__(self): + raise Errors.WafError('nodes are not supposed to be copied') + def read(self,flags='r',encoding='ISO8859-1'): + return Utils.readf(self.abspath(),flags,encoding) + def write(self,data,flags='w',encoding='ISO8859-1'): + Utils.writef(self.abspath(),data,flags,encoding) + def chmod(self,val): + os.chmod(self.abspath(),val) + def delete(self): + try: + if hasattr(self,'children'): + shutil.rmtree(self.abspath()) + else: + os.remove(self.abspath()) + except OSError: + pass + self.evict() + def evict(self): + del self.parent.children[self.name] + def suffix(self): + k=max(0,self.name.rfind('.')) + return self.name[k:] + def height(self): + d=self + val=-1 + while d: + d=d.parent + val+=1 + return val + def listdir(self): + lst=Utils.listdir(self.abspath()) + lst.sort() + return lst + def mkdir(self): + if getattr(self,'cache_isdir',None): + return + try: + self.parent.mkdir() + except OSError: + pass + if self.name: + try: + os.makedirs(self.abspath()) + except OSError: + pass + if not os.path.isdir(self.abspath()): + raise Errors.WafError('Could not create the directory %s'%self.abspath()) + try: + self.children + except AttributeError: + self.children={} + self.cache_isdir=True + def find_node(self,lst): + if isinstance(lst,str): + lst=[x for x in split_path(lst)if x and x!='.'] + cur=self + for x in lst: + if x=='..': + cur=cur.parent or cur + continue + try: + ch=cur.children + except AttributeError: + cur.children={} + else: + try: + cur=cur.children[x] + continue + except KeyError: + pass + cur=self.__class__(x,cur) + try: + os.stat(cur.abspath()) + except OSError: + cur.evict() + return None + ret=cur + try: + os.stat(ret.abspath()) + except OSError: + ret.evict() + return None + try: + while not 
getattr(cur.parent,'cache_isdir',None): + cur=cur.parent + cur.cache_isdir=True + except AttributeError: + pass + return ret + def make_node(self,lst): + if isinstance(lst,str): + lst=[x for x in split_path(lst)if x and x!='.'] + cur=self + for x in lst: + if x=='..': + cur=cur.parent or cur + continue + if getattr(cur,'children',{}): + if x in cur.children: + cur=cur.children[x] + continue + else: + cur.children={} + cur=self.__class__(x,cur) + return cur + def search_node(self,lst): + if isinstance(lst,str): + lst=[x for x in split_path(lst)if x and x!='.'] + cur=self + for x in lst: + if x=='..': + cur=cur.parent or cur + else: + try: + cur=cur.children[x] + except(AttributeError,KeyError): + return None + return cur + def path_from(self,node): + c1=self + c2=node + c1h=c1.height() + c2h=c2.height() + lst=[] + up=0 + while c1h>c2h: + lst.append(c1.name) + c1=c1.parent + c1h-=1 + while c2h>c1h: + up+=1 + c2=c2.parent + c2h-=1 + while id(c1)!=id(c2): + lst.append(c1.name) + up+=1 + c1=c1.parent + c2=c2.parent + for i in range(up): + lst.append('..') + lst.reverse() + return os.sep.join(lst)or'.' + def abspath(self): + try: + return self.cache_abspath + except AttributeError: + pass + if os.sep=='/': + if not self.parent: + val=os.sep + elif not self.parent.name: + val=os.sep+self.name + else: + val=self.parent.abspath()+os.sep+self.name + else: + if not self.parent: + val='' + elif not self.parent.name: + val=self.name+os.sep + else: + val=self.parent.abspath().rstrip(os.sep)+os.sep+self.name + self.cache_abspath=val + return val + def is_child_of(self,node): + p=self + diff=self.height()-node.height() + while diff>0: + diff-=1 + p=p.parent + return id(p)==id(node) + def ant_iter(self,accept=None,maxdepth=25,pats=[],dir=False,src=True,remove=True): + dircont=self.listdir() + dircont.sort() + try: + lst=set(self.children.keys()) + except AttributeError: + self.children={} + else: + if remove: + for x in lst-set(dircont): + self.children[x].evict() + for name in dircont: + npats=accept(name,pats) + if npats and npats[0]: + accepted=[]in npats[0] + node=self.make_node([name]) + isdir=os.path.isdir(node.abspath()) + if accepted: + if isdir: + if dir: + yield node + else: + if src: + yield node + if getattr(node,'cache_isdir',None)or isdir: + node.cache_isdir=True + if maxdepth: + for k in node.ant_iter(accept=accept,maxdepth=maxdepth-1,pats=npats,dir=dir,src=src,remove=remove): + yield k + raise StopIteration + def ant_glob(self,*k,**kw): + src=kw.get('src',True) + dir=kw.get('dir',False) + excl=kw.get('excl',exclude_regs) + incl=k and k[0]or kw.get('incl','**') + reflags=kw.get('ignorecase',0)and re.I + def to_pat(s): + lst=Utils.to_list(s) + ret=[] + for x in lst: + x=x.replace('\\','/').replace('//','/') + if x.endswith('/'): + x+='**' + lst2=x.split('/') + accu=[] + for k in lst2: + if k=='**': + accu.append(k) + else: + k=k.replace('.','[.]').replace('*','.*').replace('?','.').replace('+','\\+') + k='^%s$'%k + try: + accu.append(re.compile(k,flags=reflags)) + except Exception as e: + raise Errors.WafError("Invalid pattern: %s"%k,e) + ret.append(accu) + return ret + def filtre(name,nn): + ret=[] + for lst in nn: + if not lst: + pass + elif lst[0]=='**': + ret.append(lst) + if len(lst)>1: + if lst[1].match(name): + ret.append(lst[2:]) + else: + ret.append([]) + elif lst[0].match(name): + ret.append(lst[1:]) + return ret + def accept(name,pats): + nacc=filtre(name,pats[0]) + nrej=filtre(name,pats[1]) + if[]in nrej: + nacc=[] + return[nacc,nrej] + ret=[x for x in 
self.ant_iter(accept=accept,pats=[to_pat(incl),to_pat(excl)],maxdepth=kw.get('maxdepth',25),dir=dir,src=src,remove=kw.get('remove',True))] + if kw.get('flat',False): + return' '.join([x.path_from(self)for x in ret]) + return ret + def is_src(self): + cur=self + x=id(self.ctx.srcnode) + y=id(self.ctx.bldnode) + while cur.parent: + if id(cur)==y: + return False + if id(cur)==x: + return True + cur=cur.parent + return False + def is_bld(self): + cur=self + y=id(self.ctx.bldnode) + while cur.parent: + if id(cur)==y: + return True + cur=cur.parent + return False + def get_src(self): + cur=self + x=id(self.ctx.srcnode) + y=id(self.ctx.bldnode) + lst=[] + while cur.parent: + if id(cur)==y: + lst.reverse() + return self.ctx.srcnode.make_node(lst) + if id(cur)==x: + return self + lst.append(cur.name) + cur=cur.parent + return self + def get_bld(self): + cur=self + x=id(self.ctx.srcnode) + y=id(self.ctx.bldnode) + lst=[] + while cur.parent: + if id(cur)==y: + return self + if id(cur)==x: + lst.reverse() + return self.ctx.bldnode.make_node(lst) + lst.append(cur.name) + cur=cur.parent + lst.reverse() + if lst and Utils.is_win32 and len(lst[0])==2 and lst[0].endswith(':'): + lst[0]=lst[0][0] + return self.ctx.bldnode.make_node(['__root__']+lst) + def find_resource(self,lst): + if isinstance(lst,str): + lst=[x for x in split_path(lst)if x and x!='.'] + node=self.get_bld().search_node(lst) + if not node: + self=self.get_src() + node=self.find_node(lst) + if node: + if os.path.isdir(node.abspath()): + return None + return node + def find_or_declare(self,lst): + if isinstance(lst,str): + lst=[x for x in split_path(lst)if x and x!='.'] + node=self.get_bld().search_node(lst) + if node: + if not os.path.isfile(node.abspath()): + node.sig=None + node.parent.mkdir() + return node + self=self.get_src() + node=self.find_node(lst) + if node: + if not os.path.isfile(node.abspath()): + node.sig=None + node.parent.mkdir() + return node + node=self.get_bld().make_node(lst) + node.parent.mkdir() + return node + def find_dir(self,lst): + if isinstance(lst,str): + lst=[x for x in split_path(lst)if x and x!='.'] + node=self.find_node(lst) + try: + if not os.path.isdir(node.abspath()): + return None + except(OSError,AttributeError): + return None + return node + def change_ext(self,ext,ext_in=None): + name=self.name + if ext_in is None: + k=name.rfind('.') + if k>=0: + name=name[:k]+ext + else: + name=name+ext + else: + name=name[:-len(ext_in)]+ext + return self.parent.find_or_declare([name]) + def nice_path(self,env=None): + return self.path_from(self.ctx.launch_node()) + def bldpath(self): + return self.path_from(self.ctx.bldnode) + def srcpath(self): + return self.path_from(self.ctx.srcnode) + def relpath(self): + cur=self + x=id(self.ctx.bldnode) + while cur.parent: + if id(cur)==x: + return self.bldpath() + cur=cur.parent + return self.srcpath() + def bld_dir(self): + return self.parent.bldpath() + def bld_base(self): + s=os.path.splitext(self.name)[0] + return self.bld_dir()+os.sep+s + def get_bld_sig(self): + try: + return self.cache_sig + except AttributeError: + pass + if not self.is_bld()or self.ctx.bldnode is self.ctx.srcnode: + self.sig=Utils.h_file(self.abspath()) + self.cache_sig=ret=self.sig + return ret + search=search_node +pickle_lock=Utils.threading.Lock() +class Nod3(Node): + pass diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Options.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Options.py new file mode 100644 index 0000000..21f4254 --- /dev/null +++ 
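# Editorial sketch, not part of the commit: the Node API from waflib/Node.py
# above as it is typically used inside a wscript build() function ('bld.path'
# is the Node of the directory containing the wscript). The glob pattern,
# exclusion and file names are placeholders.
def build(bld):
    sources = bld.path.ant_glob('src/**/*.c', excl=['src/experimental/**'])
    readme = bld.path.find_node('README')                # None if absent
    header = bld.path.find_or_declare('gen/version.h')   # lives in the build dir
    if readme is not None:
        bld.to_log('README found at %s\n' % readme.abspath())
    bld.to_log('will write %s\n' % header.bldpath())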
b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Options.py @@ -0,0 +1,135 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,tempfile,optparse,sys,re +from waflib import Logs,Utils,Context +cmds='distclean configure build install clean uninstall check dist distcheck'.split() +options={} +commands=[] +lockfile=os.environ.get('WAFLOCK','.lock-waf_%s_build'%sys.platform) +try:cache_global=os.path.abspath(os.environ['WAFCACHE']) +except KeyError:cache_global='' +platform=Utils.unversioned_sys_platform() +class opt_parser(optparse.OptionParser): + def __init__(self,ctx): + optparse.OptionParser.__init__(self,conflict_handler="resolve",version='waf %s (%s)'%(Context.WAFVERSION,Context.WAFREVISION)) + self.formatter.width=Logs.get_term_cols() + p=self.add_option + self.ctx=ctx + jobs=ctx.jobs() + p('-j','--jobs',dest='jobs',default=jobs,type='int',help='amount of parallel jobs (%r)'%jobs) + p('-k','--keep',dest='keep',default=0,action='count',help='keep running happily even if errors are found') + p('-v','--verbose',dest='verbose',default=0,action='count',help='verbosity level -v -vv or -vvv [default: 0]') + p('--nocache',dest='nocache',default=False,action='store_true',help='ignore the WAFCACHE (if set)') + p('--zones',dest='zones',default='',action='store',help='debugging zones (task_gen, deps, tasks, etc)') + gr=optparse.OptionGroup(self,'configure options') + self.add_option_group(gr) + gr.add_option('-o','--out',action='store',default='',help='build dir for the project',dest='out') + gr.add_option('-t','--top',action='store',default='',help='src dir for the project',dest='top') + default_prefix=os.environ.get('PREFIX') + if not default_prefix: + if platform=='win32': + d=tempfile.gettempdir() + default_prefix=d[0].upper()+d[1:] + else: + default_prefix='/usr/local/' + gr.add_option('--prefix',dest='prefix',default=default_prefix,help='installation prefix [default: %r]'%default_prefix) + gr.add_option('--download',dest='download',default=False,action='store_true',help='try to download the tools if missing') + gr=optparse.OptionGroup(self,'build and install options') + self.add_option_group(gr) + gr.add_option('-p','--progress',dest='progress_bar',default=0,action='count',help='-p: progress bar; -pp: ide output') + gr.add_option('--targets',dest='targets',default='',action='store',help='task generators, e.g. "target1,target2"') + gr=optparse.OptionGroup(self,'step options') + self.add_option_group(gr) + gr.add_option('--files',dest='files',default='',action='store',help='files to process, by regexp, e.g. 
"*/main.c,*/test/main.o"') + default_destdir=os.environ.get('DESTDIR','') + gr=optparse.OptionGroup(self,'install/uninstall options') + self.add_option_group(gr) + gr.add_option('--destdir',help='installation root [default: %r]'%default_destdir,default=default_destdir,dest='destdir') + gr.add_option('-f','--force',dest='force',default=False,action='store_true',help='force file installation') + gr.add_option('--distcheck-args',help='arguments to pass to distcheck',default=None,action='store') + def get_usage(self): + cmds_str={} + for cls in Context.classes: + if not cls.cmd or cls.cmd=='options': + continue + s=cls.__doc__ or'' + cmds_str[cls.cmd]=s + if Context.g_module: + for(k,v)in Context.g_module.__dict__.items(): + if k in['options','init','shutdown']: + continue + if type(v)is type(Context.create_context): + if v.__doc__ and not k.startswith('_'): + cmds_str[k]=v.__doc__ + just=0 + for k in cmds_str: + just=max(just,len(k)) + lst=[' %s: %s'%(k.ljust(just),v)for(k,v)in cmds_str.items()] + lst.sort() + ret='\n'.join(lst) + return'''waf [commands] [options] + +Main commands (example: ./waf build -j4) +%s +'''%ret +class OptionsContext(Context.Context): + cmd='options' + fun='options' + def __init__(self,**kw): + super(OptionsContext,self).__init__(**kw) + self.parser=opt_parser(self) + self.option_groups={} + def jobs(self): + count=int(os.environ.get('JOBS',0)) + if count<1: + if'NUMBER_OF_PROCESSORS'in os.environ: + count=int(os.environ.get('NUMBER_OF_PROCESSORS',1)) + else: + if hasattr(os,'sysconf_names'): + if'SC_NPROCESSORS_ONLN'in os.sysconf_names: + count=int(os.sysconf('SC_NPROCESSORS_ONLN')) + elif'SC_NPROCESSORS_CONF'in os.sysconf_names: + count=int(os.sysconf('SC_NPROCESSORS_CONF')) + if not count and os.name not in('nt','java'): + try: + tmp=self.cmd_and_log(['sysctl','-n','hw.ncpu'],quiet=0) + except Exception: + pass + else: + if re.match('^[0-9]+$',tmp): + count=int(tmp) + if count<1: + count=1 + elif count>1024: + count=1024 + return count + def add_option(self,*k,**kw): + return self.parser.add_option(*k,**kw) + def add_option_group(self,*k,**kw): + try: + gr=self.option_groups[k[0]] + except KeyError: + gr=self.parser.add_option_group(*k,**kw) + self.option_groups[k[0]]=gr + return gr + def get_option_group(self,opt_str): + try: + return self.option_groups[opt_str] + except KeyError: + for group in self.parser.option_groups: + if group.title==opt_str: + return group + return None + def parse_args(self,_args=None): + global options,commands + (options,leftover_args)=self.parser.parse_args(args=_args) + commands=leftover_args + if options.destdir: + options.destdir=os.path.abspath(os.path.expanduser(options.destdir)) + if options.verbose>=1: + self.load('errcheck') + def execute(self): + super(OptionsContext,self).execute() + self.parse_args() diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Runner.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Runner.py new file mode 100644 index 0000000..e43ec71 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Runner.py @@ -0,0 +1,197 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import random,atexit +try: + from queue import Queue +except ImportError: + from Queue import Queue +from waflib import Utils,Task,Errors,Logs +GAP=10 +class TaskConsumer(Utils.threading.Thread): + def __init__(self): + Utils.threading.Thread.__init__(self) + self.ready=Queue() + self.setDaemon(1) + self.start() + def run(self): + try: + self.loop() + except Exception: + pass + def loop(self): + while 1: + tsk=self.ready.get() + if not isinstance(tsk,Task.TaskBase): + tsk(self) + else: + tsk.process() +pool=Queue() +def get_pool(): + try: + return pool.get(False) + except Exception: + return TaskConsumer() +def put_pool(x): + pool.put(x) +def _free_resources(): + global pool + lst=[] + while pool.qsize(): + lst.append(pool.get()) + for x in lst: + x.ready.put(None) + for x in lst: + x.join() + pool=None +atexit.register(_free_resources) +class Parallel(object): + def __init__(self,bld,j=2): + self.numjobs=j + self.bld=bld + self.outstanding=[] + self.frozen=[] + self.out=Queue(0) + self.count=0 + self.processed=1 + self.stop=False + self.error=[] + self.biter=None + self.dirty=False + def get_next_task(self): + if not self.outstanding: + return None + return self.outstanding.pop(0) + def postpone(self,tsk): + if random.randint(0,1): + self.frozen.insert(0,tsk) + else: + self.frozen.append(tsk) + def refill_task_list(self): + while self.count>self.numjobs*GAP: + self.get_out() + while not self.outstanding: + if self.count: + self.get_out() + elif self.frozen: + try: + cond=self.deadlock==self.processed + except AttributeError: + pass + else: + if cond: + msg='check the build order for the tasks' + for tsk in self.frozen: + if not tsk.run_after: + msg='check the methods runnable_status' + break + lst=[] + for tsk in self.frozen: + lst.append('%s\t-> %r'%(repr(tsk),[id(x)for x in tsk.run_after])) + raise Errors.WafError('Deadlock detected: %s%s'%(msg,''.join(lst))) + self.deadlock=self.processed + if self.frozen: + self.outstanding+=self.frozen + self.frozen=[] + elif not self.count: + self.outstanding.extend(next(self.biter)) + self.total=self.bld.total() + break + def add_more_tasks(self,tsk): + if getattr(tsk,'more_tasks',None): + self.outstanding+=tsk.more_tasks + self.total+=len(tsk.more_tasks) + def get_out(self): + tsk=self.out.get() + if not self.stop: + self.add_more_tasks(tsk) + self.count-=1 + self.dirty=True + return tsk + def error_handler(self,tsk): + if not self.bld.keep: + self.stop=True + self.error.append(tsk) + def add_task(self,tsk): + try: + self.pool + except AttributeError: + self.init_task_pool() + self.ready.put(tsk) + def init_task_pool(self): + pool=self.pool=[get_pool()for i in range(self.numjobs)] + self.ready=Queue(0) + def setq(consumer): + consumer.ready=self.ready + for x in pool: + x.ready.put(setq) + return pool + def free_task_pool(self): + def setq(consumer): + consumer.ready=Queue(0) + self.out.put(self) + try: + pool=self.pool + except AttributeError: + pass + else: + for x in pool: + self.ready.put(setq) + for x in pool: + self.get_out() + for x in pool: + put_pool(x) + self.pool=[] + def start(self): + self.total=self.bld.total() + while not self.stop: + self.refill_task_list() + tsk=self.get_next_task() + if not tsk: + if self.count: + continue + else: + break + if tsk.hasrun: + self.processed+=1 + continue + if self.stop: + break + try: + st=tsk.runnable_status() + except Exception: + self.processed+=1 + tsk.err_msg=Utils.ex_stack() + if not self.stop and self.bld.keep: 
+ tsk.hasrun=Task.SKIPPED + if self.bld.keep==1: + if Logs.verbose>1 or not self.error: + self.error.append(tsk) + self.stop=True + else: + if Logs.verbose>1: + self.error.append(tsk) + continue + tsk.hasrun=Task.EXCEPTION + self.error_handler(tsk) + continue + if st==Task.ASK_LATER: + self.postpone(tsk) + elif st==Task.SKIP_ME: + self.processed+=1 + tsk.hasrun=Task.SKIPPED + self.add_more_tasks(tsk) + else: + tsk.position=(self.processed,self.total) + self.count+=1 + tsk.master=self + self.processed+=1 + if self.numjobs==1: + tsk.process() + else: + self.add_task(tsk) + while self.error and self.count: + self.get_out() + assert(self.count==0 or self.stop) + self.free_task_pool() diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Scripting.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Scripting.py new file mode 100644 index 0000000..1b616a2 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Scripting.py @@ -0,0 +1,376 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,shlex,shutil,traceback,errno,sys,stat +from waflib import Utils,Configure,Logs,Options,ConfigSet,Context,Errors,Build,Node +build_dir_override=None +no_climb_commands=['configure'] +default_cmd="build" +def waf_entry_point(current_directory,version,wafdir): + Logs.init_log() + if Context.WAFVERSION!=version: + Logs.error('Waf script %r and library %r do not match (directory %r)'%(version,Context.WAFVERSION,wafdir)) + sys.exit(1) + if'--version'in sys.argv: + Context.run_dir=current_directory + ctx=Context.create_context('options') + ctx.curdir=current_directory + ctx.parse_args() + sys.exit(0) + Context.waf_dir=wafdir + Context.launch_dir=current_directory + no_climb=os.environ.get('NOCLIMB',None) + if not no_climb: + for k in no_climb_commands: + if k in sys.argv: + no_climb=True + break + cur=current_directory + while cur: + lst=os.listdir(cur) + if Options.lockfile in lst: + env=ConfigSet.ConfigSet() + try: + env.load(os.path.join(cur,Options.lockfile)) + ino=os.stat(cur)[stat.ST_INO] + except Exception: + pass + else: + for x in[env.run_dir,env.top_dir,env.out_dir]: + if Utils.is_win32: + if cur==x: + load=True + break + else: + try: + ino2=os.stat(x)[stat.ST_INO] + except OSError: + pass + else: + if ino==ino2: + load=True + break + else: + Logs.warn('invalid lock file in %s'%cur) + load=False + if load: + Context.run_dir=env.run_dir + Context.top_dir=env.top_dir + Context.out_dir=env.out_dir + break + if not Context.run_dir: + if Context.WSCRIPT_FILE in lst: + Context.run_dir=cur + next=os.path.dirname(cur) + if next==cur: + break + cur=next + if no_climb: + break + if not Context.run_dir: + if'-h'in sys.argv or'--help'in sys.argv: + Logs.warn('No wscript file found: the help message may be incomplete') + Context.run_dir=current_directory + ctx=Context.create_context('options') + ctx.curdir=current_directory + ctx.parse_args() + sys.exit(0) + Logs.error('Waf: Run from a directory containing a file named %r'%Context.WSCRIPT_FILE) + sys.exit(1) + try: + os.chdir(Context.run_dir) + except OSError: + Logs.error('Waf: The folder %r is unreadable'%Context.run_dir) + sys.exit(1) + try: + set_main_module(Context.run_dir+os.sep+Context.WSCRIPT_FILE) + except Errors.WafError as e: + Logs.pprint('RED',e.verbose_msg) + Logs.error(str(e)) + sys.exit(1) + except Exception as e: + Logs.error('Waf: The wscript in %r is unreadable'%Context.run_dir,e) + 
traceback.print_exc(file=sys.stdout) + sys.exit(2) + try: + run_commands() + except Errors.WafError as e: + if Logs.verbose>1: + Logs.pprint('RED',e.verbose_msg) + Logs.error(e.msg) + sys.exit(1) + except SystemExit: + raise + except Exception as e: + traceback.print_exc(file=sys.stdout) + sys.exit(2) + except KeyboardInterrupt: + Logs.pprint('RED','Interrupted') + sys.exit(68) +def set_main_module(file_path): + Context.g_module=Context.load_module(file_path) + Context.g_module.root_path=file_path + def set_def(obj): + name=obj.__name__ + if not name in Context.g_module.__dict__: + setattr(Context.g_module,name,obj) + for k in[update,dist,distclean,distcheck,update]: + set_def(k) + if not'init'in Context.g_module.__dict__: + Context.g_module.init=Utils.nada + if not'shutdown'in Context.g_module.__dict__: + Context.g_module.shutdown=Utils.nada + if not'options'in Context.g_module.__dict__: + Context.g_module.options=Utils.nada +def parse_options(): + Context.create_context('options').execute() + if not Options.commands: + Options.commands=[default_cmd] + Options.commands=[x for x in Options.commands if x!='options'] + Logs.verbose=Options.options.verbose + Logs.init_log() + if Options.options.zones: + Logs.zones=Options.options.zones.split(',') + if not Logs.verbose: + Logs.verbose=1 + elif Logs.verbose>0: + Logs.zones=['runner'] + if Logs.verbose>2: + Logs.zones=['*'] +def run_command(cmd_name): + ctx=Context.create_context(cmd_name) + ctx.log_timer=Utils.Timer() + ctx.options=Options.options + ctx.cmd=cmd_name + ctx.execute() + return ctx +def run_commands(): + parse_options() + run_command('init') + while Options.commands: + cmd_name=Options.commands.pop(0) + ctx=run_command(cmd_name) + Logs.info('%r finished successfully (%s)'%(cmd_name,str(ctx.log_timer))) + run_command('shutdown') +def _can_distclean(name): + for k in'.o .moc .exe'.split(): + if name.endswith(k): + return True + return False +def distclean_dir(dirname): + for(root,dirs,files)in os.walk(dirname): + for f in files: + if _can_distclean(f): + fname=root+os.sep+f + try: + os.remove(fname) + except OSError: + Logs.warn('Could not remove %r'%fname) + for x in[Context.DBFILE,'config.log']: + try: + os.remove(x) + except OSError: + pass + try: + shutil.rmtree('c4che') + except OSError: + pass +def distclean(ctx): + '''removes the build directory''' + lst=os.listdir('.') + for f in lst: + if f==Options.lockfile: + try: + proj=ConfigSet.ConfigSet(f) + except IOError: + Logs.warn('Could not read %r'%f) + continue + if proj['out_dir']!=proj['top_dir']: + try: + shutil.rmtree(proj['out_dir']) + except IOError: + pass + except OSError as e: + if e.errno!=errno.ENOENT: + Logs.warn('Could not remove %r'%proj['out_dir']) + else: + distclean_dir(proj['out_dir']) + for k in(proj['out_dir'],proj['top_dir'],proj['run_dir']): + p=os.path.join(k,Options.lockfile) + try: + os.remove(p) + except OSError as e: + if e.errno!=errno.ENOENT: + Logs.warn('Could not remove %r'%p) + if not Options.commands: + for x in'.waf-1. waf-1. .waf3-1. 
waf3-1.'.split(): + if f.startswith(x): + shutil.rmtree(f,ignore_errors=True) +class Dist(Context.Context): + '''creates an archive containing the project source code''' + cmd='dist' + fun='dist' + algo='tar.bz2' + ext_algo={} + def execute(self): + self.recurse([os.path.dirname(Context.g_module.root_path)]) + self.archive() + def archive(self): + import tarfile + arch_name=self.get_arch_name() + try: + self.base_path + except AttributeError: + self.base_path=self.path + node=self.base_path.make_node(arch_name) + try: + node.delete() + except Exception: + pass + files=self.get_files() + if self.algo.startswith('tar.'): + tar=tarfile.open(arch_name,'w:'+self.algo.replace('tar.','')) + for x in files: + self.add_tar_file(x,tar) + tar.close() + elif self.algo=='zip': + import zipfile + zip=zipfile.ZipFile(arch_name,'w',compression=zipfile.ZIP_DEFLATED) + for x in files: + archive_name=self.get_base_name()+'/'+x.path_from(self.base_path) + zip.write(x.abspath(),archive_name,zipfile.ZIP_DEFLATED) + zip.close() + else: + self.fatal('Valid algo types are tar.bz2, tar.gz or zip') + try: + from hashlib import sha1 as sha + except ImportError: + from sha import sha + try: + digest=" (sha=%r)"%sha(node.read()).hexdigest() + except Exception: + digest='' + Logs.info('New archive created: %s%s'%(self.arch_name,digest)) + def get_tar_path(self,node): + return node.abspath() + def add_tar_file(self,x,tar): + p=self.get_tar_path(x) + tinfo=tar.gettarinfo(name=p,arcname=self.get_tar_prefix()+'/'+x.path_from(self.base_path)) + tinfo.uid=0 + tinfo.gid=0 + tinfo.uname='root' + tinfo.gname='root' + fu=None + try: + fu=open(p,'rb') + tar.addfile(tinfo,fileobj=fu) + finally: + if fu: + fu.close() + def get_tar_prefix(self): + try: + return self.tar_prefix + except AttributeError: + return self.get_base_name() + def get_arch_name(self): + try: + self.arch_name + except AttributeError: + self.arch_name=self.get_base_name()+'.'+self.ext_algo.get(self.algo,self.algo) + return self.arch_name + def get_base_name(self): + try: + self.base_name + except AttributeError: + appname=getattr(Context.g_module,Context.APPNAME,'noname') + version=getattr(Context.g_module,Context.VERSION,'1.0') + self.base_name=appname+'-'+version + return self.base_name + def get_excl(self): + try: + return self.excl + except AttributeError: + self.excl=Node.exclude_regs+' **/waf-1.7.* **/.waf-1.7* **/waf3-1.7.* **/.waf3-1.7* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*' + nd=self.root.find_node(Context.out_dir) + if nd: + self.excl+=' '+nd.path_from(self.base_path) + return self.excl + def get_files(self): + try: + files=self.files + except AttributeError: + files=self.base_path.ant_glob('**/*',excl=self.get_excl()) + return files +def dist(ctx): + '''makes a tarball for redistributing the sources''' + pass +class DistCheck(Dist): + fun='distcheck' + cmd='distcheck' + def execute(self): + self.recurse([os.path.dirname(Context.g_module.root_path)]) + self.archive() + self.check() + def check(self): + import tempfile,tarfile + t=None + try: + t=tarfile.open(self.get_arch_name()) + for x in t: + t.extract(x) + finally: + if t: + t.close() + cfg=[] + if Options.options.distcheck_args: + cfg=shlex.split(Options.options.distcheck_args) + else: + cfg=[x for x in sys.argv if x.startswith('-')] + instdir=tempfile.mkdtemp('.inst',self.get_base_name()) + ret=Utils.subprocess.Popen([sys.executable,sys.argv[0],'configure','install','uninstall','--destdir='+instdir]+cfg,cwd=self.get_base_name()).wait() + if ret: + raise 
Errors.WafError('distcheck failed with code %i'%ret) + if os.path.exists(instdir): + raise Errors.WafError('distcheck succeeded, but files were left in %s'%instdir) + shutil.rmtree(self.get_base_name()) +def distcheck(ctx): + '''checks if the project compiles (tarball from 'dist')''' + pass +def update(ctx): + '''updates the plugins from the *waflib/extras* directory''' + lst=Options.options.files.split(',') + if not lst: + lst=[x for x in Utils.listdir(Context.waf_dir+'/waflib/extras')if x.endswith('.py')] + for x in lst: + tool=x.replace('.py','') + try: + Configure.download_tool(tool,force=True,ctx=ctx) + except Errors.WafError: + Logs.error('Could not find the tool %s in the remote repository'%x) +def autoconfigure(execute_method): + def execute(self): + if not Configure.autoconfig: + return execute_method(self) + env=ConfigSet.ConfigSet() + do_config=False + try: + env.load(os.path.join(Context.top_dir,Options.lockfile)) + except Exception: + Logs.warn('Configuring the project') + do_config=True + else: + if env.run_dir!=Context.run_dir: + do_config=True + else: + h=0 + for f in env['files']: + h=Utils.h_list((h,Utils.readf(f,'rb'))) + do_config=h!=env.hash + if do_config: + Options.commands.insert(0,self.cmd) + Options.commands.insert(0,'configure') + return + return execute_method(self) + return execute +Build.BuildContext.execute=autoconfigure(Build.BuildContext.execute) diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Task.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Task.py new file mode 100644 index 0000000..feec133 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Task.py @@ -0,0 +1,683 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,shutil,re,tempfile +from waflib import Utils,Logs,Errors +NOT_RUN=0 +MISSING=1 +CRASHED=2 +EXCEPTION=3 +SKIPPED=8 +SUCCESS=9 +ASK_LATER=-1 +SKIP_ME=-2 +RUN_ME=-3 +COMPILE_TEMPLATE_SHELL=''' +def f(tsk): + env = tsk.env + gen = tsk.generator + bld = gen.bld + wd = getattr(tsk, 'cwd', None) + p = env.get_flat + tsk.last_cmd = cmd = \'\'\' %s \'\'\' % s + return tsk.exec_command(cmd, cwd=wd, env=env.env or None) +''' +COMPILE_TEMPLATE_NOSHELL=''' +def f(tsk): + env = tsk.env + gen = tsk.generator + bld = gen.bld + wd = getattr(tsk, 'cwd', None) + def to_list(xx): + if isinstance(xx, str): return [xx] + return xx + tsk.last_cmd = lst = [] + %s + lst = [x for x in lst if x] + return tsk.exec_command(lst, cwd=wd, env=env.env or None) +''' +def cache_outputs(cls): + m1=cls.run + def run(self): + bld=self.generator.bld + if bld.cache_global and not bld.nocache: + if self.can_retrieve_cache(): + return 0 + return m1(self) + cls.run=run + m2=cls.post_run + def post_run(self): + bld=self.generator.bld + ret=m2(self) + if bld.cache_global and not bld.nocache: + self.put_files_cache() + return ret + cls.post_run=post_run + return cls +classes={} +class store_task_type(type): + def __init__(cls,name,bases,dict): + super(store_task_type,cls).__init__(name,bases,dict) + name=cls.__name__ + if name.endswith('_task'): + name=name.replace('_task','') + if name!='evil'and name!='TaskBase': + global classes + if getattr(cls,'run_str',None): + (f,dvars)=compile_fun(cls.run_str,cls.shell) + cls.hcode=cls.run_str + cls.run_str=None + cls.run=f + cls.vars=list(set(cls.vars+dvars)) + cls.vars.sort() + elif getattr(cls,'run',None)and not'hcode'in cls.__dict__: + cls.hcode=Utils.h_fun(cls.run) + if not 
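# Editorial sketch, not part of the commit: the Dist/DistCheck commands from
# waflib/Scripting.py above read APPNAME and VERSION from the wscript and call
# its dist() function before archiving, so the archive format and exclusions
# can be tuned there. All names below are placeholders.
from waflib import Node

APPNAME = 'myproject'
VERSION = '0.1'

def dist(ctx):
    ctx.algo = 'tar.gz'          # default algorithm is tar.bz2
    ctx.excl = Node.exclude_regs + ' **/build **/.lock-w* **/*.pyc'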
getattr(cls,'nocache',None): + cls=cache_outputs(cls) + getattr(cls,'register',classes)[name]=cls +evil=store_task_type('evil',(object,),{}) +class TaskBase(evil): + color='GREEN' + ext_in=[] + ext_out=[] + before=[] + after=[] + hcode='' + def __init__(self,*k,**kw): + self.hasrun=NOT_RUN + try: + self.generator=kw['generator'] + except KeyError: + self.generator=self + def __repr__(self): + return'\n\t{task %r: %s %s}'%(self.__class__.__name__,id(self),str(getattr(self,'fun',''))) + def __str__(self): + if hasattr(self,'fun'): + return'executing: %s\n'%self.fun.__name__ + return self.__class__.__name__+'\n' + def __hash__(self): + return id(self) + def exec_command(self,cmd,**kw): + bld=self.generator.bld + try: + if not kw.get('cwd',None): + kw['cwd']=bld.cwd + except AttributeError: + bld.cwd=kw['cwd']=bld.variant_dir + return bld.exec_command(cmd,**kw) + def runnable_status(self): + return RUN_ME + def process(self): + m=self.master + if m.stop: + m.out.put(self) + return + try: + del self.generator.bld.task_sigs[self.uid()] + except KeyError: + pass + try: + self.generator.bld.returned_tasks.append(self) + self.log_display(self.generator.bld) + ret=self.run() + except Exception: + self.err_msg=Utils.ex_stack() + self.hasrun=EXCEPTION + m.error_handler(self) + m.out.put(self) + return + if ret: + self.err_code=ret + self.hasrun=CRASHED + else: + try: + self.post_run() + except Errors.WafError: + pass + except Exception: + self.err_msg=Utils.ex_stack() + self.hasrun=EXCEPTION + else: + self.hasrun=SUCCESS + if self.hasrun!=SUCCESS: + m.error_handler(self) + m.out.put(self) + def run(self): + if hasattr(self,'fun'): + return self.fun(self) + return 0 + def post_run(self): + pass + def log_display(self,bld): + bld.to_log(self.display()) + def display(self): + col1=Logs.colors(self.color) + col2=Logs.colors.NORMAL + master=self.master + def cur(): + tmp=-1 + if hasattr(master,'ready'): + tmp-=master.ready.qsize() + return master.processed+tmp + if self.generator.bld.progress_bar==1: + return self.generator.bld.progress_line(cur(),master.total,col1,col2) + if self.generator.bld.progress_bar==2: + ela=str(self.generator.bld.timer) + try: + ins=','.join([n.name for n in self.inputs]) + except AttributeError: + ins='' + try: + outs=','.join([n.name for n in self.outputs]) + except AttributeError: + outs='' + return'|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n'%(master.total,cur(),ins,outs,ela) + s=str(self) + if not s: + return None + total=master.total + n=len(str(total)) + fs='[%%%dd/%%%dd] %%s%%s%%s'%(n,n) + return fs%(cur(),total,col1,s,col2) + def attr(self,att,default=None): + ret=getattr(self,att,self) + if ret is self:return getattr(self.__class__,att,default) + return ret + def hash_constraints(self): + cls=self.__class__ + tup=(str(cls.before),str(cls.after),str(cls.ext_in),str(cls.ext_out),cls.__name__,cls.hcode) + h=hash(tup) + return h + def format_error(self): + msg=getattr(self,'last_cmd','') + name=getattr(self.generator,'name','') + if getattr(self,"err_msg",None): + return self.err_msg + elif not self.hasrun: + return'task in %r was not executed for some reason: %r'%(name,self) + elif self.hasrun==CRASHED: + try: + return' -> task in %r failed (exit status %r): %r\n%r'%(name,self.err_code,self,msg) + except AttributeError: + return' -> task in %r failed: %r\n%r'%(name,self,msg) + elif self.hasrun==MISSING: + return' -> missing files in %r: %r\n%r'%(name,self,msg) + else: + return'invalid status for task in %r: %r'%(name,self.hasrun) + def colon(self,var1,var2): + 
tmp=self.env[var1] + if isinstance(var2,str): + it=self.env[var2] + else: + it=var2 + if isinstance(tmp,str): + return[tmp%x for x in it] + else: + if Logs.verbose and not tmp and it: + Logs.warn('Missing env variable %r for task %r (generator %r)'%(var1,self,self.generator)) + lst=[] + for y in it: + lst.extend(tmp) + lst.append(y) + return lst +class Task(TaskBase): + vars=[] + shell=False + def __init__(self,*k,**kw): + TaskBase.__init__(self,*k,**kw) + self.env=kw['env'] + self.inputs=[] + self.outputs=[] + self.dep_nodes=[] + self.run_after=set([]) + def __str__(self): + env=self.env + src_str=' '.join([a.nice_path()for a in self.inputs]) + tgt_str=' '.join([a.nice_path()for a in self.outputs]) + if self.outputs:sep=' -> ' + else:sep='' + return'%s: %s%s%s\n'%(self.__class__.__name__.replace('_task',''),src_str,sep,tgt_str) + def __repr__(self): + try: + ins=",".join([x.name for x in self.inputs]) + outs=",".join([x.name for x in self.outputs]) + except AttributeError: + ins=",".join([str(x)for x in self.inputs]) + outs=",".join([str(x)for x in self.outputs]) + return"".join(['\n\t{task %r: '%id(self),self.__class__.__name__," ",ins," -> ",outs,'}']) + def uid(self): + try: + return self.uid_ + except AttributeError: + m=Utils.md5() + up=m.update + up(self.__class__.__name__.encode()) + for x in self.inputs+self.outputs: + up(x.abspath().encode()) + self.uid_=m.digest() + return self.uid_ + def set_inputs(self,inp): + if isinstance(inp,list):self.inputs+=inp + else:self.inputs.append(inp) + def set_outputs(self,out): + if isinstance(out,list):self.outputs+=out + else:self.outputs.append(out) + def set_run_after(self,task): + assert isinstance(task,TaskBase) + self.run_after.add(task) + def signature(self): + try:return self.cache_sig + except AttributeError:pass + self.m=Utils.md5() + self.m.update(self.hcode.encode()) + self.sig_explicit_deps() + self.sig_vars() + if self.scan: + try: + self.sig_implicit_deps() + except Errors.TaskRescan: + return self.signature() + ret=self.cache_sig=self.m.digest() + return ret + def runnable_status(self): + for t in self.run_after: + if not t.hasrun: + return ASK_LATER + bld=self.generator.bld + try: + new_sig=self.signature() + except Errors.TaskNotReady: + return ASK_LATER + key=self.uid() + try: + prev_sig=bld.task_sigs[key] + except KeyError: + Logs.debug("task: task %r must run as it was never run before or the task code changed"%self) + return RUN_ME + for node in self.outputs: + try: + if node.sig!=new_sig: + return RUN_ME + except AttributeError: + Logs.debug("task: task %r must run as the output nodes do not exist"%self) + return RUN_ME + if new_sig!=prev_sig: + return RUN_ME + return SKIP_ME + def post_run(self): + bld=self.generator.bld + sig=self.signature() + for node in self.outputs: + try: + os.stat(node.abspath()) + except OSError: + self.hasrun=MISSING + self.err_msg='-> missing file: %r'%node.abspath() + raise Errors.WafError(self.err_msg) + node.sig=sig + bld.task_sigs[self.uid()]=self.cache_sig + def sig_explicit_deps(self): + bld=self.generator.bld + upd=self.m.update + for x in self.inputs+self.dep_nodes: + try: + upd(x.get_bld_sig()) + except(AttributeError,TypeError): + raise Errors.WafError('Missing node signature for %r (required by %r)'%(x,self)) + if bld.deps_man: + additional_deps=bld.deps_man + for x in self.inputs+self.outputs: + try: + d=additional_deps[id(x)] + except KeyError: + continue + for v in d: + if isinstance(v,bld.root.__class__): + try: + v=v.get_bld_sig() + except AttributeError: + raise 
Errors.WafError('Missing node signature for %r (required by %r)'%(v,self)) + elif hasattr(v,'__call__'): + v=v() + upd(v) + return self.m.digest() + def sig_vars(self): + bld=self.generator.bld + env=self.env + upd=self.m.update + act_sig=bld.hash_env_vars(env,self.__class__.vars) + upd(act_sig) + dep_vars=getattr(self,'dep_vars',None) + if dep_vars: + upd(bld.hash_env_vars(env,dep_vars)) + return self.m.digest() + scan=None + def sig_implicit_deps(self): + bld=self.generator.bld + key=self.uid() + prev=bld.task_sigs.get((key,'imp'),[]) + if prev: + try: + if prev==self.compute_sig_implicit_deps(): + return prev + except Exception: + for x in bld.node_deps.get(self.uid(),[]): + if x.is_child_of(bld.srcnode): + try: + os.stat(x.abspath()) + except OSError: + try: + del x.parent.children[x.name] + except KeyError: + pass + del bld.task_sigs[(key,'imp')] + raise Errors.TaskRescan('rescan') + (nodes,names)=self.scan() + if Logs.verbose: + Logs.debug('deps: scanner for %s returned %s %s'%(str(self),str(nodes),str(names))) + bld.node_deps[key]=nodes + bld.raw_deps[key]=names + self.are_implicit_nodes_ready() + try: + bld.task_sigs[(key,'imp')]=sig=self.compute_sig_implicit_deps() + except Exception: + if Logs.verbose: + for k in bld.node_deps.get(self.uid(),[]): + try: + k.get_bld_sig() + except Exception: + Logs.warn('Missing signature for node %r (may cause rebuilds)'%k) + else: + return sig + def compute_sig_implicit_deps(self): + upd=self.m.update + bld=self.generator.bld + self.are_implicit_nodes_ready() + for k in bld.node_deps.get(self.uid(),[]): + upd(k.get_bld_sig()) + return self.m.digest() + def are_implicit_nodes_ready(self): + bld=self.generator.bld + try: + cache=bld.dct_implicit_nodes + except AttributeError: + bld.dct_implicit_nodes=cache={} + try: + dct=cache[bld.cur] + except KeyError: + dct=cache[bld.cur]={} + for tsk in bld.cur_tasks: + for x in tsk.outputs: + dct[x]=tsk + modified=False + for x in bld.node_deps.get(self.uid(),[]): + if x in dct: + self.run_after.add(dct[x]) + modified=True + if modified: + for tsk in self.run_after: + if not tsk.hasrun: + raise Errors.TaskNotReady('not ready') + def can_retrieve_cache(self): + if not getattr(self,'outputs',None): + return None + sig=self.signature() + ssig=Utils.to_hex(self.uid())+Utils.to_hex(sig) + dname=os.path.join(self.generator.bld.cache_global,ssig) + try: + t1=os.stat(dname).st_mtime + except OSError: + return None + for node in self.outputs: + orig=os.path.join(dname,node.name) + try: + shutil.copy2(orig,node.abspath()) + os.utime(orig,None) + except(OSError,IOError): + Logs.debug('task: failed retrieving file') + return None + try: + t2=os.stat(dname).st_mtime + except OSError: + return None + if t1!=t2: + return None + for node in self.outputs: + node.sig=sig + if self.generator.bld.progress_bar<1: + self.generator.bld.to_log('restoring from cache %r\n'%node.abspath()) + self.cached=True + return True + def put_files_cache(self): + if getattr(self,'cached',None): + return None + if not getattr(self,'outputs',None): + return None + sig=self.signature() + ssig=Utils.to_hex(self.uid())+Utils.to_hex(sig) + dname=os.path.join(self.generator.bld.cache_global,ssig) + tmpdir=tempfile.mkdtemp(prefix=self.generator.bld.cache_global+os.sep+'waf') + try: + shutil.rmtree(dname) + except Exception: + pass + try: + for node in self.outputs: + dest=os.path.join(tmpdir,node.name) + shutil.copy2(node.abspath(),dest) + except(OSError,IOError): + try: + shutil.rmtree(tmpdir) + except Exception: + pass + else: + try: + 
os.rename(tmpdir,dname) + except OSError: + try: + shutil.rmtree(tmpdir) + except Exception: + pass + else: + try: + os.chmod(dname,Utils.O755) + except Exception: + pass +def is_before(t1,t2): + to_list=Utils.to_list + for k in to_list(t2.ext_in): + if k in to_list(t1.ext_out): + return 1 + if t1.__class__.__name__ in to_list(t2.after): + return 1 + if t2.__class__.__name__ in to_list(t1.before): + return 1 + return 0 +def set_file_constraints(tasks): + ins=Utils.defaultdict(set) + outs=Utils.defaultdict(set) + for x in tasks: + for a in getattr(x,'inputs',[])+getattr(x,'dep_nodes',[]): + ins[id(a)].add(x) + for a in getattr(x,'outputs',[]): + outs[id(a)].add(x) + links=set(ins.keys()).intersection(outs.keys()) + for k in links: + for a in ins[k]: + a.run_after.update(outs[k]) +def set_precedence_constraints(tasks): + cstr_groups=Utils.defaultdict(list) + for x in tasks: + h=x.hash_constraints() + cstr_groups[h].append(x) + keys=list(cstr_groups.keys()) + maxi=len(keys) + for i in range(maxi): + t1=cstr_groups[keys[i]][0] + for j in range(i+1,maxi): + t2=cstr_groups[keys[j]][0] + if is_before(t1,t2): + a=i + b=j + elif is_before(t2,t1): + a=j + b=i + else: + continue + aval=set(cstr_groups[keys[a]]) + for x in cstr_groups[keys[b]]: + x.run_after.update(aval) +def funex(c): + dc={} + exec(c,dc) + return dc['f'] +reg_act=re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})",re.M) +def compile_fun_shell(line): + extr=[] + def repl(match): + g=match.group + if g('dollar'):return"$" + elif g('backslash'):return'\\\\' + elif g('subst'):extr.append((g('var'),g('code')));return"%s" + return None + line=reg_act.sub(repl,line)or line + parm=[] + dvars=[] + app=parm.append + for(var,meth)in extr: + if var=='SRC': + if meth:app('tsk.inputs%s'%meth) + else:app('" ".join([a.path_from(bld.bldnode) for a in tsk.inputs])') + elif var=='TGT': + if meth:app('tsk.outputs%s'%meth) + else:app('" ".join([a.path_from(bld.bldnode) for a in tsk.outputs])') + elif meth: + if meth.startswith(':'): + m=meth[1:] + if m=='SRC': + m='[a.path_from(bld.bldnode) for a in tsk.inputs]' + elif m=='TGT': + m='[a.path_from(bld.bldnode) for a in tsk.outputs]' + elif m[:3]not in('tsk','gen','bld'): + dvars.extend([var,meth[1:]]) + m='%r'%m + app('" ".join(tsk.colon(%r, %s))'%(var,m)) + else: + app('%s%s'%(var,meth)) + else: + if not var in dvars:dvars.append(var) + app("p('%s')"%var) + if parm:parm="%% (%s) "%(',\n\t\t'.join(parm)) + else:parm='' + c=COMPILE_TEMPLATE_SHELL%(line,parm) + Logs.debug('action: %s'%c.strip().splitlines()) + return(funex(c),dvars) +def compile_fun_noshell(line): + extr=[] + def repl(match): + g=match.group + if g('dollar'):return"$" + elif g('subst'):extr.append((g('var'),g('code')));return"<<|@|>>" + return None + line2=reg_act.sub(repl,line) + params=line2.split('<<|@|>>') + assert(extr) + buf=[] + dvars=[] + app=buf.append + for x in range(len(extr)): + params[x]=params[x].strip() + if params[x]: + app("lst.extend(%r)"%params[x].split()) + (var,meth)=extr[x] + if var=='SRC': + if meth:app('lst.append(tsk.inputs%s)'%meth) + else:app("lst.extend([a.path_from(bld.bldnode) for a in tsk.inputs])") + elif var=='TGT': + if meth:app('lst.append(tsk.outputs%s)'%meth) + else:app("lst.extend([a.path_from(bld.bldnode) for a in tsk.outputs])") + elif meth: + if meth.startswith(':'): + m=meth[1:] + if m=='SRC': + m='[a.path_from(bld.bldnode) for a in tsk.inputs]' + elif m=='TGT': + m='[a.path_from(bld.bldnode) for a in tsk.outputs]' + elif m[:3]not in('tsk','gen','bld'): + 
dvars.extend([var,m]) + m='%r'%m + app('lst.extend(tsk.colon(%r, %s))'%(var,m)) + else: + app('lst.extend(gen.to_list(%s%s))'%(var,meth)) + else: + app('lst.extend(to_list(env[%r]))'%var) + if not var in dvars:dvars.append(var) + if extr: + if params[-1]: + app("lst.extend(%r)"%params[-1].split()) + fun=COMPILE_TEMPLATE_NOSHELL%"\n\t".join(buf) + Logs.debug('action: %s'%fun.strip().splitlines()) + return(funex(fun),dvars) +def compile_fun(line,shell=False): + if line.find('<')>0 or line.find('>')>0 or line.find('&&')>0: + shell=True + if shell: + return compile_fun_shell(line) + else: + return compile_fun_noshell(line) +def task_factory(name,func=None,vars=None,color='GREEN',ext_in=[],ext_out=[],before=[],after=[],shell=False,scan=None): + params={'vars':vars or[],'color':color,'name':name,'ext_in':Utils.to_list(ext_in),'ext_out':Utils.to_list(ext_out),'before':Utils.to_list(before),'after':Utils.to_list(after),'shell':shell,'scan':scan,} + if isinstance(func,str): + params['run_str']=func + else: + params['run']=func + cls=type(Task)(name,(Task,),params) + global classes + classes[name]=cls + return cls +def always_run(cls): + old=cls.runnable_status + def always(self): + ret=old(self) + if ret==SKIP_ME: + ret=RUN_ME + return ret + cls.runnable_status=always + return cls +def update_outputs(cls): + old_post_run=cls.post_run + def post_run(self): + old_post_run(self) + for node in self.outputs: + node.sig=Utils.h_file(node.abspath()) + self.generator.bld.task_sigs[node.abspath()]=self.uid() + cls.post_run=post_run + old_runnable_status=cls.runnable_status + def runnable_status(self): + status=old_runnable_status(self) + if status!=RUN_ME: + return status + try: + bld=self.generator.bld + prev_sig=bld.task_sigs[self.uid()] + if prev_sig==self.signature(): + for x in self.outputs: + if not x.is_child_of(bld.bldnode): + x.sig=Utils.h_file(x.abspath()) + if not x.sig or bld.task_sigs[x.abspath()]!=self.uid(): + return RUN_ME + return SKIP_ME + except OSError: + pass + except IOError: + pass + except KeyError: + pass + except IndexError: + pass + except AttributeError: + pass + return RUN_ME + cls.runnable_status=runnable_status + return cls diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/TaskGen.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/TaskGen.py new file mode 100644 index 0000000..ebed34d --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/TaskGen.py @@ -0,0 +1,405 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import copy,re,os +from waflib import Task,Utils,Logs,Errors,ConfigSet,Node +feats=Utils.defaultdict(set) +class task_gen(object): + mappings={} + prec=Utils.defaultdict(list) + def __init__(self,*k,**kw): + self.source='' + self.target='' + self.meths=[] + self.prec=Utils.defaultdict(list) + self.mappings={} + self.features=[] + self.tasks=[] + if not'bld'in kw: + self.env=ConfigSet.ConfigSet() + self.idx=0 + self.path=None + else: + self.bld=kw['bld'] + self.env=self.bld.env.derive() + self.path=self.bld.path + try: + self.idx=self.bld.idx[id(self.path)]=self.bld.idx.get(id(self.path),0)+1 + except AttributeError: + self.bld.idx={} + self.idx=self.bld.idx[id(self.path)]=1 + for key,val in kw.items(): + setattr(self,key,val) + def __str__(self): + return"<task_gen %r declared in %s>"%(self.name,self.path.abspath()) + def __repr__(self): + lst=[] + for x in self.__dict__.keys(): + if x not in['env','bld','compiled_tasks','tasks']: + lst.append("%s=%s"%(x,repr(getattr(self,x)))) + return"bld(%s) in %s"%(", ".join(lst),self.path.abspath()) + def get_name(self): + try: + return self._name + except AttributeError: + if isinstance(self.target,list): + lst=[str(x)for x in self.target] + name=self._name=','.join(lst) + else: + name=self._name=str(self.target) + return name + def set_name(self,name): + self._name=name + name=property(get_name,set_name) + def to_list(self,val): + if isinstance(val,str):return val.split() + else:return val + def post(self): + if getattr(self,'posted',None): + return False + self.posted=True + keys=set(self.meths) + self.features=Utils.to_list(self.features) + for x in self.features+['*']: + st=feats[x] + if not st: + if not x in Task.classes: + Logs.warn('feature %r does not exist - bind at least one method to it'%x) + keys.update(list(st)) + prec={} + prec_tbl=self.prec or task_gen.prec + for x in prec_tbl: + if x in keys: + prec[x]=prec_tbl[x] + tmp=[] + for a in keys: + for x in prec.values(): + if a in x:break + else: + tmp.append(a) + tmp.sort() + out=[] + while tmp: + e=tmp.pop() + if e in keys:out.append(e) + try: + nlst=prec[e] + except KeyError: + pass + else: + del prec[e] + for x in nlst: + for y in prec: + if x in prec[y]: + break + else: + tmp.append(x) + if prec: + raise Errors.WafError('Cycle detected in the method execution %r'%prec) + out.reverse() + self.meths=out + Logs.debug('task_gen: posting %s %d'%(self,id(self))) + for x in out: + try: + v=getattr(self,x) + except AttributeError: + raise Errors.WafError('%r is not a valid task generator method'%x) + Logs.debug('task_gen: -> %s (%d)'%(x,id(self))) + v() + Logs.debug('task_gen: posted %s'%self.name) + return True + def get_hook(self,node): + name=node.name + for k in self.mappings: + if name.endswith(k): + return self.mappings[k] + for k in task_gen.mappings: + if name.endswith(k): + return task_gen.mappings[k] + raise Errors.WafError("File %r has no mapping in %r (did you forget to load a waf tool?)"%(node,task_gen.mappings.keys())) + def create_task(self,name,src=None,tgt=None): + task=Task.classes[name](env=self.env.derive(),generator=self) + if src: + task.set_inputs(src) + if tgt: + task.set_outputs(tgt) + self.tasks.append(task) + return task + def clone(self,env): + newobj=self.bld() + for x in self.__dict__: + if x in['env','bld']: + continue + elif x in['path','features']: + setattr(newobj,x,getattr(self,x)) + else: + setattr(newobj,x,copy.copy(getattr(self,x))) + newobj.posted=False + if 
isinstance(env,str): + newobj.env=self.bld.all_envs[env].derive() + else: + newobj.env=env.derive() + return newobj +def declare_chain(name='',rule=None,reentrant=None,color='BLUE',ext_in=[],ext_out=[],before=[],after=[],decider=None,scan=None,install_path=None,shell=False): + ext_in=Utils.to_list(ext_in) + ext_out=Utils.to_list(ext_out) + if not name: + name=rule + cls=Task.task_factory(name,rule,color=color,ext_in=ext_in,ext_out=ext_out,before=before,after=after,scan=scan,shell=shell) + def x_file(self,node): + ext=decider and decider(self,node)or cls.ext_out + if ext_in: + _ext_in=ext_in[0] + tsk=self.create_task(name,node) + cnt=0 + keys=list(self.mappings.keys())+list(self.__class__.mappings.keys()) + for x in ext: + k=node.change_ext(x,ext_in=_ext_in) + tsk.outputs.append(k) + if reentrant!=None: + if cnt<int(reentrant): + self.source.append(k) + else: + for y in keys: + if k.name.endswith(y): + self.source.append(k) + break + cnt+=1 + if install_path: + self.bld.install_files(install_path,tsk.outputs) + return tsk + for x in cls.ext_in: + task_gen.mappings[x]=x_file + return x_file +def taskgen_method(func): + setattr(task_gen,func.__name__,func) + return func +def feature(*k): + def deco(func): + setattr(task_gen,func.__name__,func) + for name in k: + feats[name].update([func.__name__]) + return func + return deco +def before_method(*k): + def deco(func): + setattr(task_gen,func.__name__,func) + for fun_name in k: + if not func.__name__ in task_gen.prec[fun_name]: + task_gen.prec[fun_name].append(func.__name__) + return func + return deco +before=before_method +def after_method(*k): + def deco(func): + setattr(task_gen,func.__name__,func) + for fun_name in k: + if not fun_name in task_gen.prec[func.__name__]: + task_gen.prec[func.__name__].append(fun_name) + return func + return deco +after=after_method +def extension(*k): + def deco(func): + setattr(task_gen,func.__name__,func) + for x in k: + task_gen.mappings[x]=func + return func + return deco +@taskgen_method +def to_nodes(self,lst,path=None): + tmp=[] + path=path or self.path + find=path.find_resource + if isinstance(lst,self.path.__class__): + lst=[lst] + for x in Utils.to_list(lst): + if isinstance(x,str): + node=find(x) + else: + node=x + if not node: + raise Errors.WafError("source not found: %r in %r"%(x,self)) + tmp.append(node) + return tmp +@feature('*') +def process_source(self): + self.source=self.to_nodes(getattr(self,'source',[])) + for node in self.source: + self.get_hook(node)(self,node) +@feature('*') +@before_method('process_source') +def process_rule(self): + if not getattr(self,'rule',None): + return + name=str(getattr(self,'name',None)or self.target or getattr(self.rule,'__name__',self.rule)) + try: + cache=self.bld.cache_rule_attr + except AttributeError: + cache=self.bld.cache_rule_attr={} + cls=None + if getattr(self,'cache_rule','True'): + try: + cls=cache[(name,self.rule)] + except KeyError: + pass + if not cls: + cls=Task.task_factory(name,self.rule,getattr(self,'vars',[]),shell=getattr(self,'shell',True),color=getattr(self,'color','BLUE'),scan=getattr(self,'scan',None)) + if getattr(self,'scan',None): + cls.scan=self.scan + elif getattr(self,'deps',None): + def scan(self): + nodes=[] + for x in self.generator.to_list(getattr(self.generator,'deps',None)): + node=self.generator.path.find_resource(x) + if not node: + self.generator.bld.fatal('Could not find %r (was it declared?)'%x) + nodes.append(node) + return[nodes,[]] + cls.scan=scan + if getattr(self,'update_outputs',None): + 
Task.update_outputs(cls) + if getattr(self,'always',None): + Task.always_run(cls) + for x in['after','before','ext_in','ext_out']: + setattr(cls,x,getattr(self,x,[])) + if getattr(self,'cache_rule','True'): + cache[(name,self.rule)]=cls + tsk=self.create_task(name) + if getattr(self,'target',None): + if isinstance(self.target,str): + self.target=self.target.split() + if not isinstance(self.target,list): + self.target=[self.target] + for x in self.target: + if isinstance(x,str): + tsk.outputs.append(self.path.find_or_declare(x)) + else: + x.parent.mkdir() + tsk.outputs.append(x) + if getattr(self,'install_path',None): + self.bld.install_files(self.install_path,tsk.outputs) + if getattr(self,'source',None): + tsk.inputs=self.to_nodes(self.source) + self.source=[] + if getattr(self,'cwd',None): + tsk.cwd=self.cwd +@feature('seq') +def sequence_order(self): + if self.meths and self.meths[-1]!='sequence_order': + self.meths.append('sequence_order') + return + if getattr(self,'seq_start',None): + return + if getattr(self.bld,'prev',None): + self.bld.prev.post() + for x in self.bld.prev.tasks: + for y in self.tasks: + y.set_run_after(x) + self.bld.prev=self +re_m4=re.compile('@(\w+)@',re.M) +class subst_pc(Task.Task): + def run(self): + if getattr(self.generator,'is_copy',None): + self.outputs[0].write(self.inputs[0].read('rb'),'wb') + if getattr(self.generator,'chmod',None): + os.chmod(self.outputs[0].abspath(),self.generator.chmod) + return None + if getattr(self.generator,'fun',None): + return self.generator.fun(self) + code=self.inputs[0].read(encoding=getattr(self.generator,'encoding','ISO8859-1')) + if getattr(self.generator,'subst_fun',None): + code=self.generator.subst_fun(self,code) + if code is not None: + self.outputs[0].write(code,encoding=getattr(self.generator,'encoding','ISO8859-1')) + return + code=code.replace('%','%%') + lst=[] + def repl(match): + g=match.group + if g(1): + lst.append(g(1)) + return"%%(%s)s"%g(1) + return'' + global re_m4 + code=getattr(self.generator,'re_m4',re_m4).sub(repl,code) + try: + d=self.generator.dct + except AttributeError: + d={} + for x in lst: + tmp=getattr(self.generator,x,'')or self.env.get_flat(x)or self.env.get_flat(x.upper()) + d[x]=str(tmp) + code=code%d + self.outputs[0].write(code,encoding=getattr(self.generator,'encoding','ISO8859-1')) + self.generator.bld.raw_deps[self.uid()]=self.dep_vars=lst + try:delattr(self,'cache_sig') + except AttributeError:pass + if getattr(self.generator,'chmod',None): + os.chmod(self.outputs[0].abspath(),self.generator.chmod) + def sig_vars(self): + bld=self.generator.bld + env=self.env + upd=self.m.update + if getattr(self.generator,'fun',None): + upd(Utils.h_fun(self.generator.fun).encode()) + if getattr(self.generator,'subst_fun',None): + upd(Utils.h_fun(self.generator.subst_fun).encode()) + vars=self.generator.bld.raw_deps.get(self.uid(),[]) + act_sig=bld.hash_env_vars(env,vars) + upd(act_sig) + lst=[getattr(self.generator,x,'')for x in vars] + upd(Utils.h_list(lst)) + return self.m.digest() +@extension('.pc.in') +def add_pcfile(self,node): + tsk=self.create_task('subst_pc',node,node.change_ext('.pc','.pc.in')) + self.bld.install_files(getattr(self,'install_path','${LIBDIR}/pkgconfig/'),tsk.outputs) +class subst(subst_pc): + pass +@feature('subst') +@before_method('process_source','process_rule') +def process_subst(self): + src=Utils.to_list(getattr(self,'source',[])) + if isinstance(src,Node.Node): + src=[src] + tgt=Utils.to_list(getattr(self,'target',[])) + if isinstance(tgt,Node.Node): + tgt=[tgt] + if 
len(src)!=len(tgt): + raise Errors.WafError('invalid number of source/target for %r'%self) + for x,y in zip(src,tgt): + if not x or not y: + raise Errors.WafError('null source or target for %r'%self) + a,b=None,None + if isinstance(x,str)and isinstance(y,str)and x==y: + a=self.path.find_node(x) + b=self.path.get_bld().make_node(y) + if not os.path.isfile(b.abspath()): + b.sig=None + b.parent.mkdir() + else: + if isinstance(x,str): + a=self.path.find_resource(x) + elif isinstance(x,Node.Node): + a=x + if isinstance(y,str): + b=self.path.find_or_declare(y) + elif isinstance(y,Node.Node): + b=y + if not a: + raise Errors.WafError('cound not find %r for %r'%(x,self)) + has_constraints=False + tsk=self.create_task('subst',a,b) + for k in('after','before','ext_in','ext_out'): + val=getattr(self,k,None) + if val: + has_constraints=True + setattr(tsk,k,val) + if not has_constraints and b.name.endswith('.h'): + tsk.before=[k for k in('c','cxx')if k in Task.classes] + inst_to=getattr(self,'install_path',None) + if inst_to: + self.bld.install_files(inst_to,b,chmod=getattr(self,'chmod',Utils.O644)) + self.source=[] diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__init__.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__init__.py new file mode 100644 index 0000000..efeed79 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__init__.py @@ -0,0 +1,4 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/__init__.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/__init__.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..93056b0 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/__init__.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/ar.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/ar.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..e8d66ef --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/ar.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/c.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/c.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..510ce72 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/c.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/c_aliases.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/c_aliases.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..83ff86a --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/c_aliases.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/c_config.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/c_config.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..2a47dc0 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/c_config.cpython-34.pyc diff --git 
a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/c_osx.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/c_osx.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..cbd0ec6 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/c_osx.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/c_preproc.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/c_preproc.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..85f0ddf --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/c_preproc.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/c_tests.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/c_tests.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..2342461 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/c_tests.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/ccroot.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/ccroot.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..8d04384 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/ccroot.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/compiler_c.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/compiler_c.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..6f7933d --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/compiler_c.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/compiler_cxx.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/compiler_cxx.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..2dd9d4b --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/compiler_cxx.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/cxx.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/cxx.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..804890e --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/cxx.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/gcc.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/gcc.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..aa0fbc3 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/gcc.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/gxx.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/gxx.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..ed640a5 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/gxx.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/icc.cpython-34.pyc 
b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/icc.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..b03154d --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/icc.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/icpc.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/icpc.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..0ca6fdb --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/__pycache__/icpc.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ar.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ar.py new file mode 100644 index 0000000..7a16dfe --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ar.py @@ -0,0 +1,11 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib.Configure import conf +@conf +def find_ar(conf): + conf.load('ar') +def configure(conf): + conf.find_program('ar',var='AR') + conf.env.ARFLAGS='rcs' diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/asm.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/asm.py new file mode 100644 index 0000000..b9ed5f4 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/asm.py @@ -0,0 +1,25 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys +from waflib import Task,Utils +import waflib.Task +from waflib.Tools.ccroot import link_task,stlink_task +from waflib.TaskGen import extension,feature +class asm(Task.Task): + color='BLUE' + run_str='${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}' +@extension('.s','.S','.asm','.ASM','.spp','.SPP') +def asm_hook(self,node): + return self.create_compiled_task('asm',node) +class asmprogram(link_task): + run_str='${ASLINK} ${ASLINKFLAGS} ${ASLNK_TGT_F}${TGT} ${ASLNK_SRC_F}${SRC}' + ext_out=['.bin'] + inst_to='${BINDIR}' +class asmshlib(asmprogram): + inst_to='${LIBDIR}' +class asmstlib(stlink_task): + pass +def configure(conf): + conf.env['ASMPATH_ST']='-I%s' diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/bison.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/bison.py new file mode 100644 index 0000000..6ae7898 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/bison.py @@ -0,0 +1,28 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib import Task +from waflib.TaskGen import extension +class bison(Task.Task): + color='BLUE' + run_str='${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}' + ext_out=['.h'] +@extension('.y','.yc','.yy') +def big_bison(self,node): + has_h='-d'in self.env['BISONFLAGS'] + outs=[] + if node.name.endswith('.yc'): + outs.append(node.change_ext('.tab.cc')) + if has_h: + outs.append(node.change_ext('.tab.hh')) + else: + outs.append(node.change_ext('.tab.c')) + if has_h: + outs.append(node.change_ext('.tab.h')) + tsk=self.create_task('bison',node,outs) + tsk.cwd=node.parent.get_bld().abspath() + self.source.append(outs[0]) +def configure(conf): + conf.find_program('bison',var='BISON') + conf.env.BISONFLAGS=['-d'] diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c.py new file mode 100644 index 0000000..4d8cbd5 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c.py @@ -0,0 +1,24 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib import TaskGen,Task,Utils +from waflib.Tools import c_preproc +from waflib.Tools.ccroot import link_task,stlink_task +@TaskGen.extension('.c') +def c_hook(self,node): + return self.create_compiled_task('c',node) +class c(Task.Task): + run_str='${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT}' + vars=['CCDEPS'] + ext_in=['.h'] + scan=c_preproc.scan +class cprogram(link_task): + run_str='${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB}' + ext_out=['.bin'] + vars=['LINKDEPS'] + inst_to='${BINDIR}' +class cshlib(cprogram): + inst_to='${LIBDIR}' +class cstlib(stlink_task): + pass diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_aliases.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_aliases.py new file mode 100644 index 0000000..a3a2bb9 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_aliases.py @@ -0,0 +1,55 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys,re +from waflib import Utils,Build +from waflib.Configure import conf +def get_extensions(lst): + ret=[] + for x in Utils.to_list(lst): + try: + if not isinstance(x,str): + x=x.name + ret.append(x[x.rfind('.')+1:]) + except Exception: + pass + return ret +def sniff_features(**kw): + exts=get_extensions(kw['source']) + type=kw['_type'] + feats=[] + if'cxx'in exts or'cpp'in exts or'c++'in exts or'cc'in exts or'C'in exts: + feats.append('cxx') + if'c'in exts or'vala'in exts: + feats.append('c') + if'd'in exts: + feats.append('d') + if'java'in exts: + feats.append('java') + if'java'in exts: + return'java' + if type in['program','shlib','stlib']: + for x in feats: + if x in['cxx','d','c']: + feats.append(x+type) + return feats +def set_features(kw,_type): + kw['_type']=_type + kw['features']=Utils.to_list(kw.get('features',[]))+Utils.to_list(sniff_features(**kw)) +@conf +def program(bld,*k,**kw): + set_features(kw,'program') + return bld(*k,**kw) +@conf +def shlib(bld,*k,**kw): + set_features(kw,'shlib') + return bld(*k,**kw) +@conf +def stlib(bld,*k,**kw): + set_features(kw,'stlib') + return bld(*k,**kw) +@conf +def objects(bld,*k,**kw): + set_features(kw,'objects') + return bld(*k,**kw) diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_config.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_config.py new file mode 100755 index 0000000..30d9c36 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_config.py @@ -0,0 +1,751 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,re,shlex,sys +from waflib import Build,Utils,Task,Options,Logs,Errors,ConfigSet,Runner +from waflib.TaskGen import after_method,feature +from waflib.Configure import conf +WAF_CONFIG_H='config.h' +DEFKEYS='define_key' +INCKEYS='include_key' +cfg_ver={'atleast-version':'>=','exact-version':'==','max-version':'<=',} +SNIP_FUNCTION=''' +int main(int argc, char **argv) { + void *p; + (void)argc; (void)argv; + p=(void*)(%s); + return 0; +} +''' +SNIP_TYPE=''' +int main(int argc, char **argv) { + (void)argc; (void)argv; + if ((%(type_name)s *) 0) return 0; + if (sizeof (%(type_name)s)) return 0; + return 1; +} +''' +SNIP_EMPTY_PROGRAM=''' +int main(int argc, char **argv) { + (void)argc; (void)argv; + return 0; +} +''' +SNIP_FIELD=''' +int main(int argc, char **argv) { + char *off; + (void)argc; (void)argv; + off = (char*) &((%(type_name)s*)0)->%(field_name)s; + return (size_t) off < sizeof(%(type_name)s); +} +''' +MACRO_TO_DESTOS={'__linux__':'linux','__GNU__':'gnu','__FreeBSD__':'freebsd','__NetBSD__':'netbsd','__OpenBSD__':'openbsd','__sun':'sunos','__hpux':'hpux','__sgi':'irix','_AIX':'aix','__CYGWIN__':'cygwin','__MSYS__':'msys','_UWIN':'uwin','_WIN64':'win32','_WIN32':'win32','__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__':'darwin','__ENVIRONMENT_IPHONE_OS_VERSION_MIN_REQUIRED__':'darwin','__QNX__':'qnx','__native_client__':'nacl'} +MACRO_TO_DEST_CPU={'__x86_64__':'x86_64','__amd64__':'x86_64','__i386__':'x86','__ia64__':'ia','__mips__':'mips','__sparc__':'sparc','__alpha__':'alpha','__aarch64__':'aarch64','__thumb__':'thumb','__arm__':'arm','__hppa__':'hppa','__powerpc__':'powerpc','__ppc__':'powerpc','__convex__':'convex','__m68k__':'m68k','__s390x__':'s390x','__s390__':'s390','__sh__':'sh',} +@conf +def 
parse_flags(self,line,uselib_store,env=None,force_static=False): + assert(isinstance(line,str)) + env=env or self.env + app=env.append_value + appu=env.append_unique + lex=shlex.shlex(line,posix=False) + lex.whitespace_split=True + lex.commenters='' + lst=list(lex) + uselib=uselib_store + while lst: + x=lst.pop(0) + st=x[:2] + ot=x[2:] + if st=='-I'or st=='/I': + if not ot:ot=lst.pop(0) + appu('INCLUDES_'+uselib,[ot]) + elif st=='-include': + tmp=[x,lst.pop(0)] + app('CFLAGS',tmp) + app('CXXFLAGS',tmp) + elif st=='-D'or(env.CXX_NAME=='msvc'and st=='/D'): + if not ot:ot=lst.pop(0) + app('DEFINES_'+uselib,[ot]) + elif st=='-l': + if not ot:ot=lst.pop(0) + prefix=force_static and'STLIB_'or'LIB_' + appu(prefix+uselib,[ot]) + elif st=='-L': + if not ot:ot=lst.pop(0) + appu('LIBPATH_'+uselib,[ot]) + elif x.startswith('/LIBPATH:'): + appu('LIBPATH_'+uselib,[x.replace('/LIBPATH:','')]) + elif x=='-pthread'or x.startswith('+')or x.startswith('-std'): + app('CFLAGS_'+uselib,[x]) + app('CXXFLAGS_'+uselib,[x]) + app('LINKFLAGS_'+uselib,[x]) + elif x=='-framework': + appu('FRAMEWORK_'+uselib,[lst.pop(0)]) + elif x.startswith('-F'): + appu('FRAMEWORKPATH_'+uselib,[x[2:]]) + elif x.startswith('-Wl'): + app('LINKFLAGS_'+uselib,[x]) + elif x.startswith('-m')or x.startswith('-f')or x.startswith('-dynamic'): + app('CFLAGS_'+uselib,[x]) + app('CXXFLAGS_'+uselib,[x]) + elif x.startswith('-bundle'): + app('LINKFLAGS_'+uselib,[x]) + elif x.startswith('-undefined'): + arg=lst.pop(0) + app('LINKFLAGS_'+uselib,[x,arg]) + elif x.startswith('-arch')or x.startswith('-isysroot'): + tmp=[x,lst.pop(0)] + app('CFLAGS_'+uselib,tmp) + app('CXXFLAGS_'+uselib,tmp) + app('LINKFLAGS_'+uselib,tmp) + elif x.endswith('.a')or x.endswith('.so')or x.endswith('.dylib')or x.endswith('.lib'): + appu('LINKFLAGS_'+uselib,[x]) +@conf +def ret_msg(self,f,kw): + if isinstance(f,str): + return f + return f(kw) +@conf +def validate_cfg(self,kw): + if not'path'in kw: + if not self.env.PKGCONFIG: + self.find_program('pkg-config',var='PKGCONFIG') + kw['path']=self.env.PKGCONFIG + if'atleast_pkgconfig_version'in kw: + if not'msg'in kw: + kw['msg']='Checking for pkg-config version >= %r'%kw['atleast_pkgconfig_version'] + return + if not'okmsg'in kw: + kw['okmsg']='yes' + if not'errmsg'in kw: + kw['errmsg']='not found' + if'modversion'in kw: + if not'msg'in kw: + kw['msg']='Checking for %r version'%kw['modversion'] + return + for x in cfg_ver.keys(): + y=x.replace('-','_') + if y in kw: + if not'package'in kw: + raise ValueError('%s requires a package'%x) + if not'msg'in kw: + kw['msg']='Checking for %r %s %s'%(kw['package'],cfg_ver[x],kw[y]) + return + if not'msg'in kw: + kw['msg']='Checking for %r'%(kw['package']or kw['path']) +@conf +def exec_cfg(self,kw): + def define_it(): + self.define(self.have_define(kw.get('uselib_store',kw['package'])),1,0) + if'atleast_pkgconfig_version'in kw: + cmd=[kw['path'],'--atleast-pkgconfig-version=%s'%kw['atleast_pkgconfig_version']] + self.cmd_and_log(cmd) + if not'okmsg'in kw: + kw['okmsg']='yes' + return + for x in cfg_ver: + y=x.replace('-','_') + if y in kw: + self.cmd_and_log([kw['path'],'--%s=%s'%(x,kw[y]),kw['package']]) + if not'okmsg'in kw: + kw['okmsg']='yes' + define_it() + break + if'modversion'in kw: + version=self.cmd_and_log([kw['path'],'--modversion',kw['modversion']]).strip() + self.define('%s_VERSION'%Utils.quote_define_name(kw.get('uselib_store',kw['modversion'])),version) + return version + lst=[kw['path']] + defi=kw.get('define_variable',None) + if not defi: + 
defi=self.env.PKG_CONFIG_DEFINES or{} + for key,val in defi.items(): + lst.append('--define-variable=%s=%s'%(key,val)) + static=False + if'args'in kw: + args=Utils.to_list(kw['args']) + if'--static'in args or'--static-libs'in args: + static=True + lst+=args + lst.extend(Utils.to_list(kw['package'])) + if'variables'in kw: + env=kw.get('env',self.env) + uselib=kw.get('uselib_store',kw['package'].upper()) + vars=Utils.to_list(kw['variables']) + for v in vars: + val=self.cmd_and_log(lst+['--variable='+v]).strip() + var='%s_%s'%(uselib,v) + env[var]=val + if not'okmsg'in kw: + kw['okmsg']='yes' + return + ret=self.cmd_and_log(lst) + if not'okmsg'in kw: + kw['okmsg']='yes' + define_it() + self.parse_flags(ret,kw.get('uselib_store',kw['package'].upper()),kw.get('env',self.env),force_static=static) + return ret +@conf +def check_cfg(self,*k,**kw): + if k: + lst=k[0].split() + kw['package']=lst[0] + kw['args']=' '.join(lst[1:]) + self.validate_cfg(kw) + if'msg'in kw: + self.start_msg(kw['msg']) + ret=None + try: + ret=self.exec_cfg(kw) + except self.errors.WafError: + if'errmsg'in kw: + self.end_msg(kw['errmsg'],'YELLOW') + if Logs.verbose>1: + raise + else: + self.fatal('The configuration failed') + else: + if not ret: + ret=True + kw['success']=ret + if'okmsg'in kw: + self.end_msg(self.ret_msg(kw['okmsg'],kw)) + return ret +@conf +def validate_c(self,kw): + if not'env'in kw: + kw['env']=self.env.derive() + env=kw['env'] + if not'compiler'in kw and not'features'in kw: + kw['compiler']='c' + if env['CXX_NAME']and Task.classes.get('cxx',None): + kw['compiler']='cxx' + if not self.env['CXX']: + self.fatal('a c++ compiler is required') + else: + if not self.env['CC']: + self.fatal('a c compiler is required') + if not'compile_mode'in kw: + kw['compile_mode']='c' + if'cxx'in Utils.to_list(kw.get('features',[]))or kw.get('compiler','')=='cxx': + kw['compile_mode']='cxx' + if not'type'in kw: + kw['type']='cprogram' + if not'features'in kw: + kw['features']=[kw['compile_mode'],kw['type']] + else: + kw['features']=Utils.to_list(kw['features']) + if not'compile_filename'in kw: + kw['compile_filename']='test.c'+((kw['compile_mode']=='cxx')and'pp'or'') + def to_header(dct): + if'header_name'in dct: + dct=Utils.to_list(dct['header_name']) + return''.join(['#include <%s>\n'%x for x in dct]) + return'' + if'framework_name'in kw: + fwkname=kw['framework_name'] + if not'uselib_store'in kw: + kw['uselib_store']=fwkname.upper() + if not kw.get('no_header',False): + if not'header_name'in kw: + kw['header_name']=[] + fwk='%s/%s.h'%(fwkname,fwkname) + if kw.get('remove_dot_h',None): + fwk=fwk[:-2] + kw['header_name']=Utils.to_list(kw['header_name'])+[fwk] + kw['msg']='Checking for framework %s'%fwkname + kw['framework']=fwkname + if'function_name'in kw: + fu=kw['function_name'] + if not'msg'in kw: + kw['msg']='Checking for function %s'%fu + kw['code']=to_header(kw)+SNIP_FUNCTION%fu + if not'uselib_store'in kw: + kw['uselib_store']=fu.upper() + if not'define_name'in kw: + kw['define_name']=self.have_define(fu) + elif'type_name'in kw: + tu=kw['type_name'] + if not'header_name'in kw: + kw['header_name']='stdint.h' + if'field_name'in kw: + field=kw['field_name'] + kw['code']=to_header(kw)+SNIP_FIELD%{'type_name':tu,'field_name':field} + if not'msg'in kw: + kw['msg']='Checking for field %s in %s'%(field,tu) + if not'define_name'in kw: + kw['define_name']=self.have_define((tu+'_'+field).upper()) + else: + kw['code']=to_header(kw)+SNIP_TYPE%{'type_name':tu} + if not'msg'in kw: + kw['msg']='Checking for type %s'%tu + if 
not'define_name'in kw: + kw['define_name']=self.have_define(tu.upper()) + elif'header_name'in kw: + if not'msg'in kw: + kw['msg']='Checking for header %s'%kw['header_name'] + l=Utils.to_list(kw['header_name']) + assert len(l)>0,'list of headers in header_name is empty' + kw['code']=to_header(kw)+SNIP_EMPTY_PROGRAM + if not'uselib_store'in kw: + kw['uselib_store']=l[0].upper() + if not'define_name'in kw: + kw['define_name']=self.have_define(l[0]) + if'lib'in kw: + if not'msg'in kw: + kw['msg']='Checking for library %s'%kw['lib'] + if not'uselib_store'in kw: + kw['uselib_store']=kw['lib'].upper() + if'stlib'in kw: + if not'msg'in kw: + kw['msg']='Checking for static library %s'%kw['stlib'] + if not'uselib_store'in kw: + kw['uselib_store']=kw['stlib'].upper() + if'fragment'in kw: + kw['code']=kw['fragment'] + if not'msg'in kw: + kw['msg']='Checking for code snippet' + if not'errmsg'in kw: + kw['errmsg']='no' + for(flagsname,flagstype)in[('cxxflags','compiler'),('cflags','compiler'),('linkflags','linker')]: + if flagsname in kw: + if not'msg'in kw: + kw['msg']='Checking for %s flags %s'%(flagstype,kw[flagsname]) + if not'errmsg'in kw: + kw['errmsg']='no' + if not'execute'in kw: + kw['execute']=False + if kw['execute']: + kw['features'].append('test_exec') + if not'errmsg'in kw: + kw['errmsg']='not found' + if not'okmsg'in kw: + kw['okmsg']='yes' + if not'code'in kw: + kw['code']=SNIP_EMPTY_PROGRAM + if self.env[INCKEYS]: + kw['code']='\n'.join(['#include <%s>'%x for x in self.env[INCKEYS]])+'\n'+kw['code'] + if not kw.get('success'):kw['success']=None + if'define_name'in kw: + self.undefine(kw['define_name']) + if not'msg'in kw: + self.fatal('missing "msg" in conf.check(...)') +@conf +def post_check(self,*k,**kw): + is_success=0 + if kw['execute']: + if kw['success']is not None: + if kw.get('define_ret',False): + is_success=kw['success'] + else: + is_success=(kw['success']==0) + else: + is_success=(kw['success']==0) + if'define_name'in kw: + if'header_name'in kw or'function_name'in kw or'type_name'in kw or'fragment'in kw: + if kw['execute']and kw.get('define_ret',None)and isinstance(is_success,str): + self.define(kw['define_name'],is_success,quote=kw.get('quote',1)) + else: + self.define_cond(kw['define_name'],is_success) + else: + self.define_cond(kw['define_name'],is_success) + if'header_name'in kw: + if kw.get('auto_add_header_name',False): + self.env.append_value(INCKEYS,Utils.to_list(kw['header_name'])) + if is_success and'uselib_store'in kw: + from waflib.Tools import ccroot + _vars=set([]) + for x in kw['features']: + if x in ccroot.USELIB_VARS: + _vars|=ccroot.USELIB_VARS[x] + for k in _vars: + lk=k.lower() + if lk in kw: + val=kw[lk] + if isinstance(val,str): + val=val.rstrip(os.path.sep) + self.env.append_unique(k+'_'+kw['uselib_store'],Utils.to_list(val)) + return is_success +@conf +def check(self,*k,**kw): + self.validate_c(kw) + self.start_msg(kw['msg']) + ret=None + try: + ret=self.run_c_code(*k,**kw) + except self.errors.ConfigurationError: + self.end_msg(kw['errmsg'],'YELLOW') + if Logs.verbose>1: + raise + else: + self.fatal('The configuration failed') + else: + kw['success']=ret + ret=self.post_check(*k,**kw) + if not ret: + self.end_msg(kw['errmsg'],'YELLOW') + self.fatal('The configuration failed %r'%ret) + else: + self.end_msg(self.ret_msg(kw['okmsg'],kw)) + return ret +class test_exec(Task.Task): + color='PINK' + def run(self): + if getattr(self.generator,'rpath',None): + if getattr(self.generator,'define_ret',False): + 
self.generator.bld.retval=self.generator.bld.cmd_and_log([self.inputs[0].abspath()]) + else: + self.generator.bld.retval=self.generator.bld.exec_command([self.inputs[0].abspath()]) + else: + env=self.env.env or{} + env.update(dict(os.environ)) + for var in('LD_LIBRARY_PATH','DYLD_LIBRARY_PATH','PATH'): + env[var]=self.inputs[0].parent.abspath()+os.path.pathsep+env.get(var,'') + if getattr(self.generator,'define_ret',False): + self.generator.bld.retval=self.generator.bld.cmd_and_log([self.inputs[0].abspath()],env=env) + else: + self.generator.bld.retval=self.generator.bld.exec_command([self.inputs[0].abspath()],env=env) +@feature('test_exec') +@after_method('apply_link') +def test_exec_fun(self): + self.create_task('test_exec',self.link_task.outputs[0]) +CACHE_RESULTS=1 +COMPILE_ERRORS=2 +@conf +def run_c_code(self,*k,**kw): + lst=[str(v)for(p,v)in kw.items()if p!='env'] + h=Utils.h_list(lst) + dir=self.bldnode.abspath()+os.sep+(not Utils.is_win32 and'.'or'')+'conf_check_'+Utils.to_hex(h) + try: + os.makedirs(dir) + except OSError: + pass + try: + os.stat(dir) + except OSError: + self.fatal('cannot use the configuration test folder %r'%dir) + cachemode=getattr(Options.options,'confcache',None) + if cachemode==CACHE_RESULTS: + try: + proj=ConfigSet.ConfigSet(os.path.join(dir,'cache_run_c_code')) + except OSError: + pass + else: + ret=proj['cache_run_c_code'] + if isinstance(ret,str)and ret.startswith('Test does not build'): + self.fatal(ret) + return ret + bdir=os.path.join(dir,'testbuild') + if not os.path.exists(bdir): + os.makedirs(bdir) + self.test_bld=bld=Build.BuildContext(top_dir=dir,out_dir=bdir) + bld.init_dirs() + bld.progress_bar=0 + bld.targets='*' + if kw['compile_filename']: + node=bld.srcnode.make_node(kw['compile_filename']) + node.write(kw['code']) + bld.logger=self.logger + bld.all_envs.update(self.all_envs) + bld.env=kw['env'] + o=bld(features=kw['features'],source=kw['compile_filename'],target='testprog') + for k,v in kw.items(): + setattr(o,k,v) + self.to_log("==>\n%s\n<=="%kw['code']) + bld.targets='*' + ret=-1 + try: + try: + bld.compile() + except Errors.WafError: + ret='Test does not build: %s'%Utils.ex_stack() + self.fatal(ret) + else: + ret=getattr(bld,'retval',0) + finally: + proj=ConfigSet.ConfigSet() + proj['cache_run_c_code']=ret + proj.store(os.path.join(dir,'cache_run_c_code')) + return ret +@conf +def check_cxx(self,*k,**kw): + kw['compiler']='cxx' + return self.check(*k,**kw) +@conf +def check_cc(self,*k,**kw): + kw['compiler']='c' + return self.check(*k,**kw) +@conf +def define(self,key,val,quote=True): + assert key and isinstance(key,str) + if val is True: + val=1 + elif val in(False,None): + val=0 + if isinstance(val,int)or isinstance(val,float): + s='%s=%s' + else: + s=quote and'%s="%s"'or'%s=%s' + app=s%(key,str(val)) + ban=key+'=' + lst=self.env['DEFINES'] + for x in lst: + if x.startswith(ban): + lst[lst.index(x)]=app + break + else: + self.env.append_value('DEFINES',app) + self.env.append_unique(DEFKEYS,key) +@conf +def undefine(self,key): + assert key and isinstance(key,str) + ban=key+'=' + lst=[x for x in self.env['DEFINES']if not x.startswith(ban)] + self.env['DEFINES']=lst + self.env.append_unique(DEFKEYS,key) +@conf +def define_cond(self,key,val): + assert key and isinstance(key,str) + if val: + self.define(key,1) + else: + self.undefine(key) +@conf +def is_defined(self,key): + assert key and isinstance(key,str) + ban=key+'=' + for x in self.env['DEFINES']: + if x.startswith(ban): + return True + return False +@conf +def get_define(self,key): + 
assert key and isinstance(key,str) + ban=key+'=' + for x in self.env['DEFINES']: + if x.startswith(ban): + return x[len(ban):] + return None +@conf +def have_define(self,key): + return(self.env.HAVE_PAT or'HAVE_%s')%Utils.quote_define_name(key) +@conf +def write_config_header(self,configfile='',guard='',top=False,env=None,defines=True,headers=False,remove=True,define_prefix=''): + if env: + Logs.warn('Cannot pass env to write_config_header') + if not configfile:configfile=WAF_CONFIG_H + waf_guard=guard or'W_%s_WAF'%Utils.quote_define_name(configfile) + node=top and self.bldnode or self.path.get_bld() + node=node.make_node(configfile) + node.parent.mkdir() + lst=['/* WARNING! All changes made to this file will be lost! */\n'] + lst.append('#ifndef %s\n#define %s\n'%(waf_guard,waf_guard)) + lst.append(self.get_config_header(defines,headers,define_prefix=define_prefix)) + lst.append('\n#endif /* %s */\n'%waf_guard) + node.write('\n'.join(lst)) + self.env.append_unique(Build.CFG_FILES,[node.abspath()]) + if remove: + for key in self.env[DEFKEYS]: + self.undefine(key) + self.env[DEFKEYS]=[] +@conf +def get_config_header(self,defines=True,headers=False,define_prefix=''): + lst=[] + if headers: + for x in self.env[INCKEYS]: + lst.append('#include <%s>'%x) + if defines: + for x in self.env[DEFKEYS]: + if self.is_defined(x): + val=self.get_define(x) + lst.append('#define %s %s'%(define_prefix+x,val)) + else: + lst.append('/* #undef %s */'%(define_prefix+x)) + return"\n".join(lst) +@conf +def cc_add_flags(conf): + conf.add_os_flags('CPPFLAGS','CFLAGS') + conf.add_os_flags('CFLAGS') +@conf +def cxx_add_flags(conf): + conf.add_os_flags('CPPFLAGS','CXXFLAGS') + conf.add_os_flags('CXXFLAGS') +@conf +def link_add_flags(conf): + conf.add_os_flags('LINKFLAGS') + conf.add_os_flags('LDFLAGS','LINKFLAGS') +@conf +def cc_load_tools(conf): + if not conf.env.DEST_OS: + conf.env.DEST_OS=Utils.unversioned_sys_platform() + conf.load('c') +@conf +def cxx_load_tools(conf): + if not conf.env.DEST_OS: + conf.env.DEST_OS=Utils.unversioned_sys_platform() + conf.load('cxx') +@conf +def get_cc_version(conf,cc,gcc=False,icc=False): + cmd=cc+['-dM','-E','-'] + env=conf.env.env or None + try: + p=Utils.subprocess.Popen(cmd,stdin=Utils.subprocess.PIPE,stdout=Utils.subprocess.PIPE,stderr=Utils.subprocess.PIPE,env=env) + p.stdin.write('\n'.encode()) + out=p.communicate()[0] + except Exception: + conf.fatal('Could not determine the compiler version %r'%cmd) + if not isinstance(out,str): + out=out.decode(sys.stdout.encoding or'iso8859-1') + if gcc: + if out.find('__INTEL_COMPILER')>=0: + conf.fatal('The intel compiler pretends to be gcc') + if out.find('__GNUC__')<0 and out.find('__clang__')<0: + conf.fatal('Could not determine the compiler type') + if icc and out.find('__INTEL_COMPILER')<0: + conf.fatal('Not icc/icpc') + k={} + if icc or gcc: + out=out.splitlines() + for line in out: + lst=shlex.split(line) + if len(lst)>2: + key=lst[1] + val=lst[2] + k[key]=val + def isD(var): + return var in k + def isT(var): + return var in k and k[var]!='0' + if not conf.env.DEST_OS: + conf.env.DEST_OS='' + for i in MACRO_TO_DESTOS: + if isD(i): + conf.env.DEST_OS=MACRO_TO_DESTOS[i] + break + else: + if isD('__APPLE__')and isD('__MACH__'): + conf.env.DEST_OS='darwin' + elif isD('__unix__'): + conf.env.DEST_OS='generic' + if isD('__ELF__'): + conf.env.DEST_BINFMT='elf' + elif isD('__WINNT__')or isD('__CYGWIN__')or isD('_WIN32'): + conf.env.DEST_BINFMT='pe' + conf.env.LIBDIR=conf.env.BINDIR + elif isD('__APPLE__'): + 
conf.env.DEST_BINFMT='mac-o' + if not conf.env.DEST_BINFMT: + conf.env.DEST_BINFMT=Utils.destos_to_binfmt(conf.env.DEST_OS) + for i in MACRO_TO_DEST_CPU: + if isD(i): + conf.env.DEST_CPU=MACRO_TO_DEST_CPU[i] + break + Logs.debug('ccroot: dest platform: '+' '.join([conf.env[x]or'?'for x in('DEST_OS','DEST_BINFMT','DEST_CPU')])) + if icc: + ver=k['__INTEL_COMPILER'] + conf.env['CC_VERSION']=(ver[:-2],ver[-2],ver[-1]) + else: + if isD('__clang__'): + conf.env['CC_VERSION']=(k['__clang_major__'],k['__clang_minor__'],k['__clang_patchlevel__']) + else: + try: + conf.env['CC_VERSION']=(k['__GNUC__'],k['__GNUC_MINOR__'],k['__GNUC_PATCHLEVEL__']) + except KeyError: + conf.env['CC_VERSION']=(k['__GNUC__'],k['__GNUC_MINOR__'],0) + return k +@conf +def get_xlc_version(conf,cc): + cmd=cc+['-qversion'] + try: + out,err=conf.cmd_and_log(cmd,output=0) + except Errors.WafError: + conf.fatal('Could not find xlc %r'%cmd) + for v in(r"IBM XL C/C\+\+.* V(?P<major>\d*)\.(?P<minor>\d*)",): + version_re=re.compile(v,re.I).search + match=version_re(out or err) + if match: + k=match.groupdict() + conf.env['CC_VERSION']=(k['major'],k['minor']) + break + else: + conf.fatal('Could not determine the XLC version.') +@conf +def get_suncc_version(conf,cc): + cmd=cc+['-V'] + try: + out,err=conf.cmd_and_log(cmd,output=0) + except Errors.WafError as e: + if not(hasattr(e,'returncode')and hasattr(e,'stdout')and hasattr(e,'stderr')): + conf.fatal('Could not find suncc %r'%cmd) + out=e.stdout + err=e.stderr + version=(out or err) + version=version.split('\n')[0] + version_re=re.compile(r'cc:\s+sun\s+(c\+\+|c)\s+(?P<major>\d*)\.(?P<minor>\d*)',re.I).search + match=version_re(version) + if match: + k=match.groupdict() + conf.env['CC_VERSION']=(k['major'],k['minor']) + else: + conf.fatal('Could not determine the suncc version.') +@conf +def add_as_needed(self): + if self.env.DEST_BINFMT=='elf'and'gcc'in(self.env.CXX_NAME,self.env.CC_NAME): + self.env.append_unique('LINKFLAGS','--as-needed') +class cfgtask(Task.TaskBase): + def display(self): + return'' + def runnable_status(self): + return Task.RUN_ME + def uid(self): + return Utils.SIG_NIL + def run(self): + conf=self.conf + bld=Build.BuildContext(top_dir=conf.srcnode.abspath(),out_dir=conf.bldnode.abspath()) + bld.env=conf.env + bld.init_dirs() + bld.in_msg=1 + bld.logger=self.logger + try: + bld.check(**self.args) + except Exception: + return 1 +@conf +def multicheck(self,*k,**kw): + self.start_msg(kw.get('msg','Executing %d configuration tests'%len(k))) + class par(object): + def __init__(self): + self.keep=False + self.cache_global=Options.cache_global + self.nocache=Options.options.nocache + self.returned_tasks=[] + self.task_sigs={} + def total(self): + return len(tasks) + def to_log(self,*k,**kw): + return + bld=par() + tasks=[] + for dct in k: + x=cfgtask(bld=bld) + tasks.append(x) + x.args=dct + x.bld=bld + x.conf=self + x.args=dct + x.logger=Logs.make_mem_logger(str(id(x)),self.logger) + def it(): + yield tasks + while 1: + yield[] + p=Runner.Parallel(bld,Options.options.jobs) + p.biter=it() + p.start() + for x in tasks: + x.logger.memhandler.flush() + for x in tasks: + if x.hasrun!=Task.SUCCESS: + self.end_msg(kw.get('errmsg','no'),color='YELLOW') + self.fatal(kw.get('fatalmsg',None)or'One of the tests has failed, see the config.log for more information') + self.end_msg('ok') diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_osx.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_osx.py new file mode 100644 index 0000000..579b2a7 
--- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_osx.py @@ -0,0 +1,120 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,shutil,sys,platform +from waflib import TaskGen,Task,Build,Options,Utils,Errors +from waflib.TaskGen import taskgen_method,feature,after_method,before_method +app_info=''' +<?xml version="1.0" encoding="UTF-8"?> +<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd"> +<plist version="0.9"> +<dict> + <key>CFBundlePackageType</key> + <string>APPL</string> + <key>CFBundleGetInfoString</key> + <string>Created by Waf</string> + <key>CFBundleSignature</key> + <string>????</string> + <key>NOTE</key> + <string>THIS IS A GENERATED FILE, DO NOT MODIFY</string> + <key>CFBundleExecutable</key> + <string>%s</string> +</dict> +</plist> +''' +@feature('c','cxx') +def set_macosx_deployment_target(self): + if self.env['MACOSX_DEPLOYMENT_TARGET']: + os.environ['MACOSX_DEPLOYMENT_TARGET']=self.env['MACOSX_DEPLOYMENT_TARGET'] + elif'MACOSX_DEPLOYMENT_TARGET'not in os.environ: + if Utils.unversioned_sys_platform()=='darwin': + os.environ['MACOSX_DEPLOYMENT_TARGET']='.'.join(platform.mac_ver()[0].split('.')[:2]) +@taskgen_method +def create_bundle_dirs(self,name,out): + bld=self.bld + dir=out.parent.find_or_declare(name) + dir.mkdir() + macos=dir.find_or_declare(['Contents','MacOS']) + macos.mkdir() + return dir +def bundle_name_for_output(out): + name=out.name + k=name.rfind('.') + if k>=0: + name=name[:k]+'.app' + else: + name=name+'.app' + return name +@feature('cprogram','cxxprogram') +@after_method('apply_link') +def create_task_macapp(self): + if self.env['MACAPP']or getattr(self,'mac_app',False): + out=self.link_task.outputs[0] + name=bundle_name_for_output(out) + dir=self.create_bundle_dirs(name,out) + n1=dir.find_or_declare(['Contents','MacOS',out.name]) + self.apptask=self.create_task('macapp',self.link_task.outputs,n1) + inst_to=getattr(self,'install_path','/Applications')+'/%s/Contents/MacOS/'%name + self.bld.install_files(inst_to,n1,chmod=Utils.O755) + if getattr(self,'mac_resources',None): + res_dir=n1.parent.parent.make_node('Resources') + inst_to=getattr(self,'install_path','/Applications')+'/%s/Resources'%name + for x in self.to_list(self.mac_resources): + node=self.path.find_node(x) + if not node: + raise Errors.WafError('Missing mac_resource %r in %r'%(x,self)) + parent=node.parent + if os.path.isdir(node.abspath()): + nodes=node.ant_glob('**') + else: + nodes=[node] + for node in nodes: + rel=node.path_from(parent) + tsk=self.create_task('macapp',node,res_dir.make_node(rel)) + self.bld.install_as(inst_to+'/%s'%rel,node) + if getattr(self.bld,'is_install',None): + self.install_task.hasrun=Task.SKIP_ME +@feature('cprogram','cxxprogram') +@after_method('apply_link') +def create_task_macplist(self): + if self.env['MACAPP']or getattr(self,'mac_app',False): + out=self.link_task.outputs[0] + name=bundle_name_for_output(out) + dir=self.create_bundle_dirs(name,out) + n1=dir.find_or_declare(['Contents','Info.plist']) + self.plisttask=plisttask=self.create_task('macplist',[],n1) + if getattr(self,'mac_plist',False): + node=self.path.find_resource(self.mac_plist) + if node: + plisttask.inputs.append(node) + else: + plisttask.code=self.mac_plist + else: + plisttask.code=app_info%self.link_task.outputs[0].name + inst_to=getattr(self,'install_path','/Applications')+'/%s/Contents/'%name + 
self.bld.install_files(inst_to,n1) +@feature('cshlib','cxxshlib') +@before_method('apply_link','propagate_uselib_vars') +def apply_bundle(self): + if self.env['MACBUNDLE']or getattr(self,'mac_bundle',False): + self.env['LINKFLAGS_cshlib']=self.env['LINKFLAGS_cxxshlib']=[] + self.env['cshlib_PATTERN']=self.env['cxxshlib_PATTERN']=self.env['macbundle_PATTERN'] + use=self.use=self.to_list(getattr(self,'use',[])) + if not'MACBUNDLE'in use: + use.append('MACBUNDLE') +app_dirs=['Contents','Contents/MacOS','Contents/Resources'] +class macapp(Task.Task): + color='PINK' + def run(self): + self.outputs[0].parent.mkdir() + shutil.copy2(self.inputs[0].srcpath(),self.outputs[0].abspath()) +class macplist(Task.Task): + color='PINK' + ext_in=['.bin'] + def run(self): + if getattr(self,'code',None): + txt=self.code + else: + txt=self.inputs[0].read() + self.outputs[0].write(txt) diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_preproc.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_preproc.py new file mode 100644 index 0000000..c94ff88 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_preproc.py @@ -0,0 +1,607 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import re,string,traceback +from waflib import Logs,Utils,Errors +from waflib.Logs import debug,error +class PreprocError(Errors.WafError): + pass +POPFILE='-' +recursion_limit=150 +go_absolute=False +standard_includes=['/usr/include'] +if Utils.is_win32: + standard_includes=[] +use_trigraphs=0 +strict_quotes=0 +g_optrans={'not':'!','and':'&&','bitand':'&','and_eq':'&=','or':'||','bitor':'|','or_eq':'|=','xor':'^','xor_eq':'^=','compl':'~',} +re_lines=re.compile('^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',re.IGNORECASE|re.MULTILINE) +re_mac=re.compile("^[a-zA-Z_]\w*") +re_fun=re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]') +re_pragma_once=re.compile('^\s*once\s*',re.IGNORECASE) +re_nl=re.compile('\\\\\r*\n',re.MULTILINE) +re_cpp=re.compile(r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"',re.DOTALL|re.MULTILINE) +trig_def=[('??'+a,b)for a,b in zip("=-/!'()<>",r'#~\|^[]{}')] +chr_esc={'0':0,'a':7,'b':8,'t':9,'n':10,'f':11,'v':12,'r':13,'\\':92,"'":39} +NUM='i' +OP='O' +IDENT='T' +STR='s' +CHAR='c' +tok_types=[NUM,STR,IDENT,OP] +exp_types=[r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",r'L?"([^"\\]|\\.)*"',r'[a-zA-Z_]\w*',r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',] +re_clexer=re.compile('|'.join(["(?P<%s>%s)"%(name,part)for name,part in zip(tok_types,exp_types)]),re.M) +accepted='a' +ignored='i' +undefined='u' +skipped='s' +def repl(m): + s=m.group(0) + if s.startswith('/'): + return' ' + return s +def filter_comments(filename): + code=Utils.readf(filename) + if use_trigraphs: + for(a,b)in trig_def:code=code.split(a).join(b) + code=re_nl.sub('',code) + code=re_cpp.sub(repl,code) + return[(m.group(2),m.group(3))for m in re.finditer(re_lines,code)] +prec={} +ops=['* / %','+ -','<< >>','< <= >= >','== !=','& | ^','&& ||',','] +for x in range(len(ops)): + syms=ops[x] 
+ for u in syms.split(): + prec[u]=x +def trimquotes(s): + if not s:return'' + s=s.rstrip() + if s[0]=="'"and s[-1]=="'":return s[1:-1] + return s +def reduce_nums(val_1,val_2,val_op): + try:a=0+val_1 + except TypeError:a=int(val_1) + try:b=0+val_2 + except TypeError:b=int(val_2) + d=val_op + if d=='%':c=a%b + elif d=='+':c=a+b + elif d=='-':c=a-b + elif d=='*':c=a*b + elif d=='/':c=a/b + elif d=='^':c=a^b + elif d=='|':c=a|b + elif d=='||':c=int(a or b) + elif d=='&':c=a&b + elif d=='&&':c=int(a and b) + elif d=='==':c=int(a==b) + elif d=='!=':c=int(a!=b) + elif d=='<=':c=int(a<=b) + elif d=='<':c=int(a<b) + elif d=='>':c=int(a>b) + elif d=='>=':c=int(a>=b) + elif d=='^':c=int(a^b) + elif d=='<<':c=a<<b + elif d=='>>':c=a>>b + else:c=0 + return c +def get_num(lst): + if not lst:raise PreprocError("empty list for get_num") + (p,v)=lst[0] + if p==OP: + if v=='(': + count_par=1 + i=1 + while i<len(lst): + (p,v)=lst[i] + if p==OP: + if v==')': + count_par-=1 + if count_par==0: + break + elif v=='(': + count_par+=1 + i+=1 + else: + raise PreprocError("rparen expected %r"%lst) + (num,_)=get_term(lst[1:i]) + return(num,lst[i+1:]) + elif v=='+': + return get_num(lst[1:]) + elif v=='-': + num,lst=get_num(lst[1:]) + return(reduce_nums('-1',num,'*'),lst) + elif v=='!': + num,lst=get_num(lst[1:]) + return(int(not int(num)),lst) + elif v=='~': + num,lst=get_num(lst[1:]) + return(~int(num),lst) + else: + raise PreprocError("Invalid op token %r for get_num"%lst) + elif p==NUM: + return v,lst[1:] + elif p==IDENT: + return 0,lst[1:] + else: + raise PreprocError("Invalid token %r for get_num"%lst) +def get_term(lst): + if not lst:raise PreprocError("empty list for get_term") + num,lst=get_num(lst) + if not lst: + return(num,[]) + (p,v)=lst[0] + if p==OP: + if v==',': + return get_term(lst[1:]) + elif v=='?': + count_par=0 + i=1 + while i<len(lst): + (p,v)=lst[i] + if p==OP: + if v==')': + count_par-=1 + elif v=='(': + count_par+=1 + elif v==':': + if count_par==0: + break + i+=1 + else: + raise PreprocError("rparen expected %r"%lst) + if int(num): + return get_term(lst[1:i]) + else: + return get_term(lst[i+1:]) + else: + num2,lst=get_num(lst[1:]) + if not lst: + num2=reduce_nums(num,num2,v) + return get_term([(NUM,num2)]+lst) + p2,v2=lst[0] + if p2!=OP: + raise PreprocError("op expected %r"%lst) + if prec[v2]>=prec[v]: + num2=reduce_nums(num,num2,v) + return get_term([(NUM,num2)]+lst) + else: + num3,lst=get_num(lst[1:]) + num3=reduce_nums(num2,num3,v2) + return get_term([(NUM,num),(p,v),(NUM,num3)]+lst) + raise PreprocError("cannot reduce %r"%lst) +def reduce_eval(lst): + num,lst=get_term(lst) + return(NUM,num) +def stringize(lst): + lst=[str(v2)for(p2,v2)in lst] + return"".join(lst) +def paste_tokens(t1,t2): + p1=None + if t1[0]==OP and t2[0]==OP: + p1=OP + elif t1[0]==IDENT and(t2[0]==IDENT or t2[0]==NUM): + p1=IDENT + elif t1[0]==NUM and t2[0]==NUM: + p1=NUM + if not p1: + raise PreprocError('tokens do not make a valid paste %r and %r'%(t1,t2)) + return(p1,t1[1]+t2[1]) +def reduce_tokens(lst,defs,ban=[]): + i=0 + while i<len(lst): + (p,v)=lst[i] + if p==IDENT and v=="defined": + del lst[i] + if i<len(lst): + (p2,v2)=lst[i] + if p2==IDENT: + if v2 in defs: + lst[i]=(NUM,1) + else: + lst[i]=(NUM,0) + elif p2==OP and v2=='(': + del lst[i] + (p2,v2)=lst[i] + del lst[i] + if v2 in defs: + lst[i]=(NUM,1) + else: + lst[i]=(NUM,0) + else: + raise PreprocError("Invalid define expression %r"%lst) + elif p==IDENT and v in defs: + if isinstance(defs[v],str): + a,b=extract_macro(defs[v]) + defs[v]=b + 
macro_def=defs[v] + to_add=macro_def[1] + if isinstance(macro_def[0],list): + del lst[i] + accu=to_add[:] + reduce_tokens(accu,defs,ban+[v]) + for x in range(len(accu)): + lst.insert(i,accu[x]) + i+=1 + else: + args=[] + del lst[i] + if i>=len(lst): + raise PreprocError("expected '(' after %r (got nothing)"%v) + (p2,v2)=lst[i] + if p2!=OP or v2!='(': + raise PreprocError("expected '(' after %r"%v) + del lst[i] + one_param=[] + count_paren=0 + while i<len(lst): + p2,v2=lst[i] + del lst[i] + if p2==OP and count_paren==0: + if v2=='(': + one_param.append((p2,v2)) + count_paren+=1 + elif v2==')': + if one_param:args.append(one_param) + break + elif v2==',': + if not one_param:raise PreprocError("empty param in funcall %s"%v) + args.append(one_param) + one_param=[] + else: + one_param.append((p2,v2)) + else: + one_param.append((p2,v2)) + if v2=='(':count_paren+=1 + elif v2==')':count_paren-=1 + else: + raise PreprocError('malformed macro') + accu=[] + arg_table=macro_def[0] + j=0 + while j<len(to_add): + (p2,v2)=to_add[j] + if p2==OP and v2=='#': + if j+1<len(to_add)and to_add[j+1][0]==IDENT and to_add[j+1][1]in arg_table: + toks=args[arg_table[to_add[j+1][1]]] + accu.append((STR,stringize(toks))) + j+=1 + else: + accu.append((p2,v2)) + elif p2==OP and v2=='##': + if accu and j+1<len(to_add): + t1=accu[-1] + if to_add[j+1][0]==IDENT and to_add[j+1][1]in arg_table: + toks=args[arg_table[to_add[j+1][1]]] + if toks: + accu[-1]=paste_tokens(t1,toks[0]) + accu.extend(toks[1:]) + else: + accu.append((p2,v2)) + accu.extend(toks) + elif to_add[j+1][0]==IDENT and to_add[j+1][1]=='__VA_ARGS__': + va_toks=[] + st=len(macro_def[0]) + pt=len(args) + for x in args[pt-st+1:]: + va_toks.extend(x) + va_toks.append((OP,',')) + if va_toks:va_toks.pop() + if len(accu)>1: + (p3,v3)=accu[-1] + (p4,v4)=accu[-2] + if v3=='##': + accu.pop() + if v4==','and pt<st: + accu.pop() + accu+=va_toks + else: + accu[-1]=paste_tokens(t1,to_add[j+1]) + j+=1 + else: + accu.append((p2,v2)) + elif p2==IDENT and v2 in arg_table: + toks=args[arg_table[v2]] + reduce_tokens(toks,defs,ban+[v]) + accu.extend(toks) + else: + accu.append((p2,v2)) + j+=1 + reduce_tokens(accu,defs,ban+[v]) + for x in range(len(accu)-1,-1,-1): + lst.insert(i,accu[x]) + i+=1 +def eval_macro(lst,defs): + reduce_tokens(lst,defs,[]) + if not lst:raise PreprocError("missing tokens to evaluate") + (p,v)=reduce_eval(lst) + return int(v)!=0 +def extract_macro(txt): + t=tokenize(txt) + if re_fun.search(txt): + p,name=t[0] + p,v=t[1] + if p!=OP:raise PreprocError("expected open parenthesis") + i=1 + pindex=0 + params={} + prev='(' + while 1: + i+=1 + p,v=t[i] + if prev=='(': + if p==IDENT: + params[v]=pindex + pindex+=1 + prev=p + elif p==OP and v==')': + break + else: + raise PreprocError("unexpected token (3)") + elif prev==IDENT: + if p==OP and v==',': + prev=v + elif p==OP and v==')': + break + else: + raise PreprocError("comma or ... expected") + elif prev==',': + if p==IDENT: + params[v]=pindex + pindex+=1 + prev=p + elif p==OP and v=='...': + raise PreprocError("not implemented (1)") + else: + raise PreprocError("comma or ... 
expected (2)") + elif prev=='...': + raise PreprocError("not implemented (2)") + else: + raise PreprocError("unexpected else") + return(name,[params,t[i+1:]]) + else: + (p,v)=t[0] + if len(t)>1: + return(v,[[],t[1:]]) + else: + return(v,[[],[('T','')]]) +re_include=re.compile('^\s*(<(?P<a>.*)>|"(?P<b>.*)")') +def extract_include(txt,defs): + m=re_include.search(txt) + if m: + if m.group('a'):return'<',m.group('a') + if m.group('b'):return'"',m.group('b') + toks=tokenize(txt) + reduce_tokens(toks,defs,['waf_include']) + if not toks: + raise PreprocError("could not parse include %s"%txt) + if len(toks)==1: + if toks[0][0]==STR: + return'"',toks[0][1] + else: + if toks[0][1]=='<'and toks[-1][1]=='>': + return stringize(toks).lstrip('<').rstrip('>') + raise PreprocError("could not parse include %s."%txt) +def parse_char(txt): + if not txt:raise PreprocError("attempted to parse a null char") + if txt[0]!='\\': + return ord(txt) + c=txt[1] + if c=='x': + if len(txt)==4 and txt[3]in string.hexdigits:return int(txt[2:],16) + return int(txt[2:],16) + elif c.isdigit(): + if c=='0'and len(txt)==2:return 0 + for i in 3,2,1: + if len(txt)>i and txt[1:1+i].isdigit(): + return(1+i,int(txt[1:1+i],8)) + else: + try:return chr_esc[c] + except KeyError:raise PreprocError("could not parse char literal '%s'"%txt) +def tokenize(s): + return tokenize_private(s)[:] +@Utils.run_once +def tokenize_private(s): + ret=[] + for match in re_clexer.finditer(s): + m=match.group + for name in tok_types: + v=m(name) + if v: + if name==IDENT: + try:v=g_optrans[v];name=OP + except KeyError: + if v.lower()=="true": + v=1 + name=NUM + elif v.lower()=="false": + v=0 + name=NUM + elif name==NUM: + if m('oct'):v=int(v,8) + elif m('hex'):v=int(m('hex'),16) + elif m('n0'):v=m('n0') + else: + v=m('char') + if v:v=parse_char(v) + else:v=m('n2')or m('n4') + elif name==OP: + if v=='%:':v='#' + elif v=='%:%:':v='##' + elif name==STR: + v=v[1:-1] + ret.append((name,v)) + break + return ret +@Utils.run_once +def define_name(line): + return re_mac.match(line).group(0) +class c_parser(object): + def __init__(self,nodepaths=None,defines=None): + self.lines=[] + if defines is None: + self.defs={} + else: + self.defs=dict(defines) + self.state=[] + self.count_files=0 + self.currentnode_stack=[] + self.nodepaths=nodepaths or[] + self.nodes=[] + self.names=[] + self.curfile='' + self.ban_includes=set([]) + def cached_find_resource(self,node,filename): + try: + nd=node.ctx.cache_nd + except AttributeError: + nd=node.ctx.cache_nd={} + tup=(node,filename) + try: + return nd[tup] + except KeyError: + ret=node.find_resource(filename) + if ret: + if getattr(ret,'children',None): + ret=None + elif ret.is_child_of(node.ctx.bldnode): + tmp=node.ctx.srcnode.search_node(ret.path_from(node.ctx.bldnode)) + if tmp and getattr(tmp,'children',None): + ret=None + nd[tup]=ret + return ret + def tryfind(self,filename): + self.curfile=filename + found=self.cached_find_resource(self.currentnode_stack[-1],filename) + for n in self.nodepaths: + if found: + break + found=self.cached_find_resource(n,filename) + if found and not found in self.ban_includes: + self.nodes.append(found) + if filename[-4:]!='.moc': + self.addlines(found) + else: + if not filename in self.names: + self.names.append(filename) + return found + def addlines(self,node): + self.currentnode_stack.append(node.parent) + filepath=node.abspath() + self.count_files+=1 + if self.count_files>recursion_limit: + raise PreprocError("recursion limit exceeded") + pc=self.parse_cache + debug('preproc: reading 
file %r',filepath) + try: + lns=pc[filepath] + except KeyError: + pass + else: + self.lines.extend(lns) + return + try: + lines=filter_comments(filepath) + lines.append((POPFILE,'')) + lines.reverse() + pc[filepath]=lines + self.lines.extend(lines) + except IOError: + raise PreprocError("could not read the file %s"%filepath) + except Exception: + if Logs.verbose>0: + error("parsing %s failed"%filepath) + traceback.print_exc() + def start(self,node,env): + debug('preproc: scanning %s (in %s)',node.name,node.parent.name) + bld=node.ctx + try: + self.parse_cache=bld.parse_cache + except AttributeError: + bld.parse_cache={} + self.parse_cache=bld.parse_cache + self.current_file=node + self.addlines(node) + if env['DEFINES']: + try: + lst=['%s %s'%(x[0],trimquotes('='.join(x[1:])))for x in[y.split('=')for y in env['DEFINES']]] + lst.reverse() + self.lines.extend([('define',x)for x in lst]) + except AttributeError: + pass + while self.lines: + (token,line)=self.lines.pop() + if token==POPFILE: + self.count_files-=1 + self.currentnode_stack.pop() + continue + try: + ve=Logs.verbose + if ve:debug('preproc: line is %s - %s state is %s',token,line,self.state) + state=self.state + if token[:2]=='if': + state.append(undefined) + elif token=='endif': + state.pop() + if token[0]!='e': + if skipped in self.state or ignored in self.state: + continue + if token=='if': + ret=eval_macro(tokenize(line),self.defs) + if ret:state[-1]=accepted + else:state[-1]=ignored + elif token=='ifdef': + m=re_mac.match(line) + if m and m.group(0)in self.defs:state[-1]=accepted + else:state[-1]=ignored + elif token=='ifndef': + m=re_mac.match(line) + if m and m.group(0)in self.defs:state[-1]=ignored + else:state[-1]=accepted + elif token=='include'or token=='import': + (kind,inc)=extract_include(line,self.defs) + if ve:debug('preproc: include found %s (%s) ',inc,kind) + if kind=='"'or not strict_quotes: + self.current_file=self.tryfind(inc) + if token=='import': + self.ban_includes.add(self.current_file) + elif token=='elif': + if state[-1]==accepted: + state[-1]=skipped + elif state[-1]==ignored: + if eval_macro(tokenize(line),self.defs): + state[-1]=accepted + elif token=='else': + if state[-1]==accepted:state[-1]=skipped + elif state[-1]==ignored:state[-1]=accepted + elif token=='define': + try: + self.defs[define_name(line)]=line + except Exception: + raise PreprocError("Invalid define line %s"%line) + elif token=='undef': + m=re_mac.match(line) + if m and m.group(0)in self.defs: + self.defs.__delitem__(m.group(0)) + elif token=='pragma': + if re_pragma_once.match(line.lower()): + self.ban_includes.add(self.current_file) + except Exception as e: + if Logs.verbose: + debug('preproc: line parsing failed (%s): %s %s',e,line,Utils.ex_stack()) +def scan(task): + global go_absolute + try: + incn=task.generator.includes_nodes + except AttributeError: + raise Errors.WafError('%r is missing a feature such as "c", "cxx" or "includes": '%task.generator) + if go_absolute: + nodepaths=incn+[task.generator.bld.root.find_dir(x)for x in standard_includes] + else: + nodepaths=[x for x in incn if x.is_child_of(x.ctx.srcnode)or x.is_child_of(x.ctx.bldnode)] + tmp=c_parser(nodepaths) + tmp.start(task.inputs[0],task.env) + if Logs.verbose: + debug('deps: deps for %r: %r; unresolved %r'%(task.inputs,tmp.nodes,tmp.names)) + return(tmp.nodes,tmp.names) diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_tests.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_tests.py new file mode 100644 index 
0000000..f275977 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/c_tests.py @@ -0,0 +1,153 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib import Task +from waflib.Configure import conf +from waflib.TaskGen import feature,before_method,after_method +import sys +LIB_CODE=''' +#ifdef _MSC_VER +#define testEXPORT __declspec(dllexport) +#else +#define testEXPORT +#endif +testEXPORT int lib_func(void) { return 9; } +''' +MAIN_CODE=''' +#ifdef _MSC_VER +#define testEXPORT __declspec(dllimport) +#else +#define testEXPORT +#endif +testEXPORT int lib_func(void); +int main(int argc, char **argv) { + (void)argc; (void)argv; + return !(lib_func() == 9); +} +''' +@feature('link_lib_test') +@before_method('process_source') +def link_lib_test_fun(self): + def write_test_file(task): + task.outputs[0].write(task.generator.code) + rpath=[] + if getattr(self,'add_rpath',False): + rpath=[self.bld.path.get_bld().abspath()] + mode=self.mode + m='%s %s'%(mode,mode) + ex=self.test_exec and'test_exec'or'' + bld=self.bld + bld(rule=write_test_file,target='test.'+mode,code=LIB_CODE) + bld(rule=write_test_file,target='main.'+mode,code=MAIN_CODE) + bld(features='%sshlib'%m,source='test.'+mode,target='test') + bld(features='%sprogram %s'%(m,ex),source='main.'+mode,target='app',use='test',rpath=rpath) +@conf +def check_library(self,mode=None,test_exec=True): + if not mode: + mode='c' + if self.env.CXX: + mode='cxx' + self.check(compile_filename=[],features='link_lib_test',msg='Checking for libraries',mode=mode,test_exec=test_exec,) +INLINE_CODE=''' +typedef int foo_t; +static %s foo_t static_foo () {return 0; } +%s foo_t foo () { + return 0; +} +''' +INLINE_VALUES=['inline','__inline__','__inline'] +@conf +def check_inline(self,**kw): + self.start_msg('Checking for inline') + if not'define_name'in kw: + kw['define_name']='INLINE_MACRO' + if not'features'in kw: + if self.env.CXX: + kw['features']=['cxx'] + else: + kw['features']=['c'] + for x in INLINE_VALUES: + kw['fragment']=INLINE_CODE%(x,x) + try: + self.check(**kw) + except self.errors.ConfigurationError: + continue + else: + self.end_msg(x) + if x!='inline': + self.define('inline',x,quote=False) + return x + self.fatal('could not use inline functions') +LARGE_FRAGMENT='''#include <unistd.h> +int main(int argc, char **argv) { + (void)argc; (void)argv; + return !(sizeof(off_t) >= 8); +} +''' +@conf +def check_large_file(self,**kw): + if not'define_name'in kw: + kw['define_name']='HAVE_LARGEFILE' + if not'execute'in kw: + kw['execute']=True + if not'features'in kw: + if self.env.CXX: + kw['features']=['cxx','cxxprogram'] + else: + kw['features']=['c','cprogram'] + kw['fragment']=LARGE_FRAGMENT + kw['msg']='Checking for large file support' + ret=True + try: + if self.env.DEST_BINFMT!='pe': + ret=self.check(**kw) + except self.errors.ConfigurationError: + pass + else: + if ret: + return True + kw['msg']='Checking for -D_FILE_OFFSET_BITS=64' + kw['defines']=['_FILE_OFFSET_BITS=64'] + try: + ret=self.check(**kw) + except self.errors.ConfigurationError: + pass + else: + self.define('_FILE_OFFSET_BITS',64) + return ret + self.fatal('There is no support for large files') +ENDIAN_FRAGMENT=''' +short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 }; +short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 }; +int use_ascii (int i) { + return ascii_mm[i] + ascii_ii[i]; +} +short int 
ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 }; +short int ebcdic_mm[] = { 0xC2C9, 0xC785, 0x95C4, 0x8981, 0x95E2, 0xA8E2, 0 }; +int use_ebcdic (int i) { + return ebcdic_mm[i] + ebcdic_ii[i]; +} +extern int foo; +''' +class grep_for_endianness(Task.Task): + color='PINK' + def run(self): + txt=self.inputs[0].read(flags='rb').decode('iso8859-1') + if txt.find('LiTTleEnDian')>-1: + self.generator.tmp.append('little') + elif txt.find('BIGenDianSyS')>-1: + self.generator.tmp.append('big') + else: + return-1 +@feature('grep_for_endianness') +@after_method('process_source') +def grep_for_endianness_fun(self): + self.create_task('grep_for_endianness',self.compiled_tasks[0].outputs[0]) +@conf +def check_endianness(self): + tmp=[] + def check_msg(self): + return tmp[0] + self.check(fragment=ENDIAN_FRAGMENT,features='c grep_for_endianness',msg="Checking for endianness",define='ENDIANNESS',tmp=tmp,okmsg=check_msg) + return tmp[0] diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ccroot.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ccroot.py new file mode 100644 index 0000000..2fde5c7 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ccroot.py @@ -0,0 +1,405 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,re +from waflib import Task,Utils,Node,Errors +from waflib.TaskGen import after_method,before_method,feature,taskgen_method,extension +from waflib.Tools import c_aliases,c_preproc,c_config,c_osx,c_tests +from waflib.Configure import conf +SYSTEM_LIB_PATHS=['/usr/lib64','/usr/lib','/usr/local/lib64','/usr/local/lib'] +USELIB_VARS=Utils.defaultdict(set) +USELIB_VARS['c']=set(['INCLUDES','FRAMEWORKPATH','DEFINES','CPPFLAGS','CCDEPS','CFLAGS','ARCH']) +USELIB_VARS['cxx']=set(['INCLUDES','FRAMEWORKPATH','DEFINES','CPPFLAGS','CXXDEPS','CXXFLAGS','ARCH']) +USELIB_VARS['d']=set(['INCLUDES','DFLAGS']) +USELIB_VARS['includes']=set(['INCLUDES','FRAMEWORKPATH','ARCH']) +USELIB_VARS['cprogram']=USELIB_VARS['cxxprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','FRAMEWORK','FRAMEWORKPATH','ARCH']) +USELIB_VARS['cshlib']=USELIB_VARS['cxxshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','FRAMEWORK','FRAMEWORKPATH','ARCH']) +USELIB_VARS['cstlib']=USELIB_VARS['cxxstlib']=set(['ARFLAGS','LINKDEPS']) +USELIB_VARS['dprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) +USELIB_VARS['dshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) +USELIB_VARS['dstlib']=set(['ARFLAGS','LINKDEPS']) +USELIB_VARS['asm']=set(['ASFLAGS']) +@taskgen_method +def create_compiled_task(self,name,node): + out='%s.%d.o'%(node.name,self.idx) + task=self.create_task(name,node,node.parent.find_or_declare(out)) + try: + self.compiled_tasks.append(task) + except AttributeError: + self.compiled_tasks=[task] + return task +@taskgen_method +def to_incnodes(self,inlst): + lst=[] + seen=set([]) + for x in self.to_list(inlst): + if x in seen or not x: + continue + seen.add(x) + if isinstance(x,Node.Node): + lst.append(x) + else: + if os.path.isabs(x): + lst.append(self.bld.root.make_node(x)or x) + else: + if x[0]=='#': + p=self.bld.bldnode.make_node(x[1:]) + v=self.bld.srcnode.make_node(x[1:]) + else: + p=self.path.get_bld().make_node(x) + v=self.path.make_node(x) + if p.is_child_of(self.bld.bldnode): + p.mkdir() + lst.append(p) + 
lst.append(v) + return lst +@feature('c','cxx','d','asm','fc','includes') +@after_method('propagate_uselib_vars','process_source') +def apply_incpaths(self): + lst=self.to_incnodes(self.to_list(getattr(self,'includes',[]))+self.env['INCLUDES']) + self.includes_nodes=lst + self.env['INCPATHS']=[x.abspath()for x in lst] +class link_task(Task.Task): + color='YELLOW' + inst_to=None + chmod=Utils.O755 + def add_target(self,target): + if isinstance(target,str): + pattern=self.env[self.__class__.__name__+'_PATTERN'] + if not pattern: + pattern='%s' + folder,name=os.path.split(target) + if self.__class__.__name__.find('shlib')>0 and getattr(self.generator,'vnum',None): + nums=self.generator.vnum.split('.') + if self.env.DEST_BINFMT=='pe': + name=name+'-'+nums[0] + elif self.env.DEST_OS=='openbsd': + pattern='%s.%s.%s'%(pattern,nums[0],nums[1]) + tmp=folder+os.sep+pattern%name + target=self.generator.path.find_or_declare(tmp) + self.set_outputs(target) +class stlink_task(link_task): + run_str='${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}' +def rm_tgt(cls): + old=cls.run + def wrap(self): + try:os.remove(self.outputs[0].abspath()) + except OSError:pass + return old(self) + setattr(cls,'run',wrap) +rm_tgt(stlink_task) +@feature('c','cxx','d','fc','asm') +@after_method('process_source') +def apply_link(self): + for x in self.features: + if x=='cprogram'and'cxx'in self.features: + x='cxxprogram' + elif x=='cshlib'and'cxx'in self.features: + x='cxxshlib' + if x in Task.classes: + if issubclass(Task.classes[x],link_task): + link=x + break + else: + return + objs=[t.outputs[0]for t in getattr(self,'compiled_tasks',[])] + self.link_task=self.create_task(link,objs) + self.link_task.add_target(self.target) + try: + inst_to=self.install_path + except AttributeError: + inst_to=self.link_task.__class__.inst_to + if inst_to: + self.install_task=self.bld.install_files(inst_to,self.link_task.outputs[:],env=self.env,chmod=self.link_task.chmod) +@taskgen_method +def use_rec(self,name,**kw): + if name in self.tmp_use_not or name in self.tmp_use_seen: + return + try: + y=self.bld.get_tgen_by_name(name) + except Errors.WafError: + self.uselib.append(name) + self.tmp_use_not.add(name) + return + self.tmp_use_seen.append(name) + y.post() + y.tmp_use_objects=objects=kw.get('objects',True) + y.tmp_use_stlib=stlib=kw.get('stlib',True) + try: + link_task=y.link_task + except AttributeError: + y.tmp_use_var='' + else: + objects=False + if not isinstance(link_task,stlink_task): + stlib=False + y.tmp_use_var='LIB' + else: + y.tmp_use_var='STLIB' + p=self.tmp_use_prec + for x in self.to_list(getattr(y,'use',[])): + try: + p[x].append(name) + except KeyError: + p[x]=[name] + self.use_rec(x,objects=objects,stlib=stlib) +@feature('c','cxx','d','use','fc') +@before_method('apply_incpaths','propagate_uselib_vars') +@after_method('apply_link','process_source') +def process_use(self): + use_not=self.tmp_use_not=set([]) + self.tmp_use_seen=[] + use_prec=self.tmp_use_prec={} + self.uselib=self.to_list(getattr(self,'uselib',[])) + self.includes=self.to_list(getattr(self,'includes',[])) + names=self.to_list(getattr(self,'use',[])) + for x in names: + self.use_rec(x) + for x in use_not: + if x in use_prec: + del use_prec[x] + out=[] + tmp=[] + for x in self.tmp_use_seen: + for k in use_prec.values(): + if x in k: + break + else: + tmp.append(x) + while tmp: + e=tmp.pop() + out.append(e) + try: + nlst=use_prec[e] + except KeyError: + pass + else: + del use_prec[e] + for x in nlst: + for y in use_prec: + if x in use_prec[y]: + break + 
else: + tmp.append(x) + if use_prec: + raise Errors.WafError('Cycle detected in the use processing %r'%use_prec) + out.reverse() + link_task=getattr(self,'link_task',None) + for x in out: + y=self.bld.get_tgen_by_name(x) + var=y.tmp_use_var + if var and link_task: + if var=='LIB'or y.tmp_use_stlib: + self.env.append_value(var,[y.target[y.target.rfind(os.sep)+1:]]) + self.link_task.dep_nodes.extend(y.link_task.outputs) + tmp_path=y.link_task.outputs[0].parent.path_from(self.bld.bldnode) + self.env.append_value(var+'PATH',[tmp_path]) + else: + if y.tmp_use_objects: + self.add_objects_from_tgen(y) + if getattr(y,'export_includes',None): + self.includes.extend(y.to_incnodes(y.export_includes)) + if getattr(y,'export_defines',None): + self.env.append_value('DEFINES',self.to_list(y.export_defines)) + for x in names: + try: + y=self.bld.get_tgen_by_name(x) + except Errors.WafError: + if not self.env['STLIB_'+x]and not x in self.uselib: + self.uselib.append(x) + else: + for k in self.to_list(getattr(y,'use',[])): + if not self.env['STLIB_'+k]and not k in self.uselib: + self.uselib.append(k) +@taskgen_method +def accept_node_to_link(self,node): + return not node.name.endswith('.pdb') +@taskgen_method +def add_objects_from_tgen(self,tg): + try: + link_task=self.link_task + except AttributeError: + pass + else: + for tsk in getattr(tg,'compiled_tasks',[]): + for x in tsk.outputs: + if self.accept_node_to_link(x): + link_task.inputs.append(x) +@taskgen_method +def get_uselib_vars(self): + _vars=set([]) + for x in self.features: + if x in USELIB_VARS: + _vars|=USELIB_VARS[x] + return _vars +@feature('c','cxx','d','fc','javac','cs','uselib','asm') +@after_method('process_use') +def propagate_uselib_vars(self): + _vars=self.get_uselib_vars() + env=self.env + for x in _vars: + y=x.lower() + env.append_unique(x,self.to_list(getattr(self,y,[]))) + for x in self.features: + for var in _vars: + compvar='%s_%s'%(var,x) + env.append_value(var,env[compvar]) + for x in self.to_list(getattr(self,'uselib',[])): + for v in _vars: + env.append_value(v,env[v+'_'+x]) +@feature('cshlib','cxxshlib','fcshlib') +@after_method('apply_link') +def apply_implib(self): + if not self.env.DEST_BINFMT=='pe': + return + dll=self.link_task.outputs[0] + if isinstance(self.target,Node.Node): + name=self.target.name + else: + name=os.path.split(self.target)[1] + implib=self.env['implib_PATTERN']%name + implib=dll.parent.find_or_declare(implib) + self.env.append_value('LINKFLAGS',self.env['IMPLIB_ST']%implib.bldpath()) + self.link_task.outputs.append(implib) + if getattr(self,'defs',None)and self.env.DEST_BINFMT=='pe': + node=self.path.find_resource(self.defs) + if not node: + raise Errors.WafError('invalid def file %r'%self.defs) + if'msvc'in(self.env.CC_NAME,self.env.CXX_NAME): + self.env.append_value('LINKFLAGS','/def:%s'%node.path_from(self.bld.bldnode)) + self.link_task.dep_nodes.append(node) + else: + self.link_task.inputs.append(node) + try: + inst_to=self.install_path + except AttributeError: + inst_to=self.link_task.__class__.inst_to + if not inst_to: + return + self.implib_install_task=self.bld.install_as('${LIBDIR}/%s'%implib.name,implib,self.env) +re_vnum=re.compile('^([1-9]\\d*|0)[.]([1-9]\\d*|0)[.]([1-9]\\d*|0)$') +@feature('cshlib','cxxshlib','dshlib','fcshlib','vnum') +@after_method('apply_link','propagate_uselib_vars') +def apply_vnum(self): + if not getattr(self,'vnum','')or os.name!='posix'or self.env.DEST_BINFMT not in('elf','mac-o'): + return + link=self.link_task + if not re_vnum.match(self.vnum): + raise 
Errors.WafError('Invalid version %r for %r'%(self.vnum,self)) + nums=self.vnum.split('.') + node=link.outputs[0] + libname=node.name + if libname.endswith('.dylib'): + name3=libname.replace('.dylib','.%s.dylib'%self.vnum) + name2=libname.replace('.dylib','.%s.dylib'%nums[0]) + else: + name3=libname+'.'+self.vnum + name2=libname+'.'+nums[0] + if self.env.SONAME_ST: + v=self.env.SONAME_ST%name2 + self.env.append_value('LINKFLAGS',v.split()) + if self.env.DEST_OS!='openbsd': + self.create_task('vnum',node,[node.parent.find_or_declare(name2),node.parent.find_or_declare(name3)]) + if getattr(self,'install_task',None): + self.install_task.hasrun=Task.SKIP_ME + bld=self.bld + path=self.install_task.dest + if self.env.DEST_OS=='openbsd': + libname=self.link_task.outputs[0].name + t1=bld.install_as('%s%s%s'%(path,os.sep,libname),node,env=self.env,chmod=self.link_task.chmod) + self.vnum_install_task=(t1,) + else: + t1=bld.install_as(path+os.sep+name3,node,env=self.env,chmod=self.link_task.chmod) + t2=bld.symlink_as(path+os.sep+name2,name3) + t3=bld.symlink_as(path+os.sep+libname,name3) + self.vnum_install_task=(t1,t2,t3) + if'-dynamiclib'in self.env['LINKFLAGS']: + try: + inst_to=self.install_path + except AttributeError: + inst_to=self.link_task.__class__.inst_to + if inst_to: + p=Utils.subst_vars(inst_to,self.env) + path=os.path.join(p,self.link_task.outputs[0].name) + self.env.append_value('LINKFLAGS',['-install_name',path]) +class vnum(Task.Task): + color='CYAN' + quient=True + ext_in=['.bin'] + def run(self): + for x in self.outputs: + path=x.abspath() + try: + os.remove(path) + except OSError: + pass + try: + os.symlink(self.inputs[0].name,path) + except OSError: + return 1 +class fake_shlib(link_task): + def runnable_status(self): + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + for x in self.outputs: + x.sig=Utils.h_file(x.abspath()) + return Task.SKIP_ME +class fake_stlib(stlink_task): + def runnable_status(self): + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + for x in self.outputs: + x.sig=Utils.h_file(x.abspath()) + return Task.SKIP_ME +@conf +def read_shlib(self,name,paths=[],export_includes=[],export_defines=[]): + return self(name=name,features='fake_lib',lib_paths=paths,lib_type='shlib',export_includes=export_includes,export_defines=export_defines) +@conf +def read_stlib(self,name,paths=[],export_includes=[],export_defines=[]): + return self(name=name,features='fake_lib',lib_paths=paths,lib_type='stlib',export_includes=export_includes,export_defines=export_defines) +lib_patterns={'shlib':['lib%s.so','%s.so','lib%s.dylib','lib%s.dll','%s.dll'],'stlib':['lib%s.a','%s.a','lib%s.dll','%s.dll','lib%s.lib','%s.lib'],} +@feature('fake_lib') +def process_lib(self): + node=None + names=[x%self.name for x in lib_patterns[self.lib_type]] + for x in self.lib_paths+[self.path]+SYSTEM_LIB_PATHS: + if not isinstance(x,Node.Node): + x=self.bld.root.find_node(x)or self.path.find_node(x) + if not x: + continue + for y in names: + node=x.find_node(y) + if node: + node.sig=Utils.h_file(node.abspath()) + break + else: + continue + break + else: + raise Errors.WafError('could not find library %r'%self.name) + self.link_task=self.create_task('fake_%s'%self.lib_type,[],[node]) + self.target=self.name +class fake_o(Task.Task): + def runnable_status(self): + return Task.SKIP_ME +@extension('.o','.obj') +def add_those_o_files(self,node): + tsk=self.create_task('fake_o',[],node) + try: + self.compiled_tasks.append(tsk) + except AttributeError: + 
self.compiled_tasks=[tsk] +@feature('fake_obj') +@before_method('process_source') +def process_objs(self): + for node in self.to_nodes(self.source): + self.add_those_o_files(node) + self.source=[] +@conf +def read_object(self,obj): + if not isinstance(obj,self.path.__class__): + obj=self.path.find_resource(obj) + return self(features='fake_obj',source=obj,name=obj.name) diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_c.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_c.py new file mode 100644 index 0000000..cb0cdc2 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_c.py @@ -0,0 +1,39 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys,imp,types +from waflib.Tools import ccroot +from waflib import Utils,Configure +from waflib.Logs import debug +c_compiler={'win32':['msvc','gcc'],'cygwin':['gcc'],'darwin':['gcc'],'aix':['xlc','gcc'],'linux':['gcc','icc'],'sunos':['suncc','gcc'],'irix':['gcc','irixcc'],'hpux':['gcc'],'gnu':['gcc'],'java':['gcc','msvc','icc'],'default':['gcc'],} +def configure(conf): + try:test_for_compiler=conf.options.check_c_compiler + except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_c')") + for compiler in test_for_compiler.split(): + conf.env.stash() + conf.start_msg('Checking for %r (c compiler)'%compiler) + try: + conf.load(compiler) + except conf.errors.ConfigurationError as e: + conf.env.revert() + conf.end_msg(False) + debug('compiler_c: %r'%e) + else: + if conf.env['CC']: + conf.end_msg(conf.env.get_flat('CC')) + conf.env['COMPILER_CC']=compiler + break + conf.end_msg(False) + else: + conf.fatal('could not configure a c compiler!') +def options(opt): + opt.load_special_tools('c_*.py',ban=['c_dumbpreproc.py']) + global c_compiler + build_platform=Utils.unversioned_sys_platform() + possible_compiler_list=c_compiler[build_platform in c_compiler and build_platform or'default'] + test_for_compiler=' '.join(possible_compiler_list) + cc_compiler_opts=opt.add_option_group("C Compiler Options") + cc_compiler_opts.add_option('--check-c-compiler',default="%s"%test_for_compiler,help='On this platform (%s) the following C-Compiler will be checked by default: "%s"'%(build_platform,test_for_compiler),dest="check_c_compiler") + for x in test_for_compiler.split(): + opt.load('%s'%x) diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_cxx.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_cxx.py new file mode 100644 index 0000000..bb12911 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_cxx.py @@ -0,0 +1,39 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
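For context, the configuration layer shown above (check_cc and write_config_header from c_config.py, the feature checks in c_tests.py, and the compiler-selection loop in compiler_c.py) is normally driven from a project-level wscript rather than called directly. A minimal sketch of such a wscript follows; it is illustrative only, not part of this commit, and the fragment, define name, and header name are invented:

    def options(opt):
        # registers --check-c-compiler, which compiler_c.configure() reads
        opt.load('compiler_c')

    def configure(conf):
        conf.load('compiler_c')      # tries the platform's candidate compilers from the c_compiler table
        conf.check_cc(fragment='int main(void) { return 0; }\n',
                      msg='Checking that the C compiler works')
        conf.check_large_file()      # may define HAVE_LARGEFILE or _FILE_OFFSET_BITS=64
        conf.check_endianness()      # compiles ENDIAN_FRAGMENT and greps the object for the byte-order marker
        conf.define('EXAMPLE_STR', 'hello')   # hypothetical define; quote=True yields EXAMPLE_STR="hello"
        conf.write_config_header('config.h')  # emits the collected DEFINES into a guarded header

write_config_header() records the generated node under CFG_FILES and, with remove=True (the default), undefines the keys tracked in DEFKEYS so later checks start from a clean slate.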
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys,imp,types +from waflib.Tools import ccroot +from waflib import Utils,Configure +from waflib.Logs import debug +cxx_compiler={'win32':['msvc','g++'],'cygwin':['g++'],'darwin':['g++'],'aix':['xlc++','g++'],'linux':['g++','icpc'],'sunos':['sunc++','g++'],'irix':['g++'],'hpux':['g++'],'gnu':['g++'],'java':['g++','msvc','icpc'],'default':['g++']} +def configure(conf): + try:test_for_compiler=conf.options.check_cxx_compiler + except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_cxx')") + for compiler in test_for_compiler.split(): + conf.env.stash() + conf.start_msg('Checking for %r (c++ compiler)'%compiler) + try: + conf.load(compiler) + except conf.errors.ConfigurationError as e: + conf.env.revert() + conf.end_msg(False) + debug('compiler_cxx: %r'%e) + else: + if conf.env['CXX']: + conf.end_msg(conf.env.get_flat('CXX')) + conf.env['COMPILER_CXX']=compiler + break + conf.end_msg(False) + else: + conf.fatal('could not configure a c++ compiler!') +def options(opt): + opt.load_special_tools('cxx_*.py') + global cxx_compiler + build_platform=Utils.unversioned_sys_platform() + possible_compiler_list=cxx_compiler[build_platform in cxx_compiler and build_platform or'default'] + test_for_compiler=' '.join(possible_compiler_list) + cxx_compiler_opts=opt.add_option_group('C++ Compiler Options') + cxx_compiler_opts.add_option('--check-cxx-compiler',default="%s"%test_for_compiler,help='On this platform (%s) the following C++ Compiler will be checked by default: "%s"'%(build_platform,test_for_compiler),dest="check_cxx_compiler") + for x in test_for_compiler.split(): + opt.load('%s'%x) diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_d.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_d.py new file mode 100644 index 0000000..4f9a6d8 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_d.py @@ -0,0 +1,29 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys,imp,types +from waflib import Utils,Configure,Options,Logs +def configure(conf): + for compiler in conf.options.dcheck.split(','): + conf.env.stash() + conf.start_msg('Checking for %r (d compiler)'%compiler) + try: + conf.load(compiler) + except conf.errors.ConfigurationError as e: + conf.env.revert() + conf.end_msg(False) + Logs.debug('compiler_d: %r'%e) + else: + if conf.env.D: + conf.end_msg(conf.env.get_flat('D')) + conf.env['COMPILER_D']=compiler + break + conf.end_msg(False) + else: + conf.fatal('no suitable d compiler was found') +def options(opt): + d_compiler_opts=opt.add_option_group('D Compiler Options') + d_compiler_opts.add_option('--check-d-compiler',default='gdc,dmd,ldc2',action='store',help='check for the compiler [Default:gdc,dmd,ldc2]',dest='dcheck') + for d_compiler in['gdc','dmd','ldc2']: + opt.load('%s'%d_compiler) diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_fc.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_fc.py new file mode 100644 index 0000000..656ed4d --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/compiler_fc.py @@ -0,0 +1,43 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys,imp,types +from waflib import Utils,Configure,Options,Logs,Errors +from waflib.Tools import fc +fc_compiler={'win32':['gfortran','ifort'],'darwin':['gfortran','g95','ifort'],'linux':['gfortran','g95','ifort'],'java':['gfortran','g95','ifort'],'default':['gfortran'],'aix':['gfortran']} +def __list_possible_compiler(platform): + try: + return fc_compiler[platform] + except KeyError: + return fc_compiler["default"] +def configure(conf): + try:test_for_compiler=conf.options.check_fc + except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_fc')") + for compiler in test_for_compiler.split(): + conf.env.stash() + conf.start_msg('Checking for %r (fortran compiler)'%compiler) + try: + conf.load(compiler) + except conf.errors.ConfigurationError as e: + conf.env.revert() + conf.end_msg(False) + Logs.debug('compiler_fortran: %r'%e) + else: + if conf.env['FC']: + conf.end_msg(conf.env.get_flat('FC')) + conf.env.COMPILER_FORTRAN=compiler + break + conf.end_msg(False) + else: + conf.fatal('could not configure a fortran compiler!') +def options(opt): + opt.load_special_tools('fc_*.py') + build_platform=Utils.unversioned_sys_platform() + detected_platform=Options.platform + possible_compiler_list=__list_possible_compiler(detected_platform) + test_for_compiler=' '.join(possible_compiler_list) + fortran_compiler_opts=opt.add_option_group("Fortran Compiler Options") + fortran_compiler_opts.add_option('--check-fortran-compiler',default="%s"%test_for_compiler,help='On this platform (%s) the following Fortran Compiler will be checked by default: "%s"'%(detected_platform,test_for_compiler),dest="check_fc") + for compiler in test_for_compiler.split(): + opt.load('%s'%compiler) diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/cs.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/cs.py new file mode 100644 index 0000000..4a17c6e --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/cs.py @@ -0,0 +1,132 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
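compiler_fc.py above follows the same pattern as compiler_c and compiler_cxx: options() registers --check-fortran-compiler and configure() tries each candidate until one loads, storing the winner in COMPILER_FORTRAN. A hypothetical wscript enabling Fortran support (illustrative only, not part of this commit) mirrors the C case:

    def options(opt):
        opt.load('compiler_fc')   # adds --check-fortran-compiler (e.g. gfortran, g95, ifort on linux)

    def configure(conf):
        conf.load('compiler_fc')  # fatal 'could not configure a fortran compiler!' if none of them load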
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib import Utils,Task,Options,Logs,Errors +from waflib.TaskGen import before_method,after_method,feature +from waflib.Tools import ccroot +from waflib.Configure import conf +import os,tempfile +ccroot.USELIB_VARS['cs']=set(['CSFLAGS','ASSEMBLIES','RESOURCES']) +ccroot.lib_patterns['csshlib']=['%s'] +@feature('cs') +@before_method('process_source') +def apply_cs(self): + cs_nodes=[] + no_nodes=[] + for x in self.to_nodes(self.source): + if x.name.endswith('.cs'): + cs_nodes.append(x) + else: + no_nodes.append(x) + self.source=no_nodes + bintype=getattr(self,'bintype',self.gen.endswith('.dll')and'library'or'exe') + self.cs_task=tsk=self.create_task('mcs',cs_nodes,self.path.find_or_declare(self.gen)) + tsk.env.CSTYPE='/target:%s'%bintype + tsk.env.OUT='/out:%s'%tsk.outputs[0].abspath() + self.env.append_value('CSFLAGS','/platform:%s'%getattr(self,'platform','anycpu')) + inst_to=getattr(self,'install_path',bintype=='exe'and'${BINDIR}'or'${LIBDIR}') + if inst_to: + mod=getattr(self,'chmod',bintype=='exe'and Utils.O755 or Utils.O644) + self.install_task=self.bld.install_files(inst_to,self.cs_task.outputs[:],env=self.env,chmod=mod) +@feature('cs') +@after_method('apply_cs') +def use_cs(self): + names=self.to_list(getattr(self,'use',[])) + get=self.bld.get_tgen_by_name + for x in names: + try: + y=get(x) + except Errors.WafError: + self.env.append_value('CSFLAGS','/reference:%s'%x) + continue + y.post() + tsk=getattr(y,'cs_task',None)or getattr(y,'link_task',None) + if not tsk: + self.bld.fatal('cs task has no link task for use %r'%self) + self.cs_task.dep_nodes.extend(tsk.outputs) + self.cs_task.set_run_after(tsk) + self.env.append_value('CSFLAGS','/reference:%s'%tsk.outputs[0].abspath()) +@feature('cs') +@after_method('apply_cs','use_cs') +def debug_cs(self): + csdebug=getattr(self,'csdebug',self.env.CSDEBUG) + if not csdebug: + return + node=self.cs_task.outputs[0] + if self.env.CS_NAME=='mono': + out=node.parent.find_or_declare(node.name+'.mdb') + else: + out=node.change_ext('.pdb') + self.cs_task.outputs.append(out) + try: + self.install_task.source.append(out) + except AttributeError: + pass + if csdebug=='pdbonly': + val=['/debug+','/debug:pdbonly'] + elif csdebug=='full': + val=['/debug+','/debug:full'] + else: + val=['/debug-'] + self.env.append_value('CSFLAGS',val) +class mcs(Task.Task): + color='YELLOW' + run_str='${MCS} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}' + def exec_command(self,cmd,**kw): + bld=self.generator.bld + try: + if not kw.get('cwd',None): + kw['cwd']=bld.cwd + except AttributeError: + bld.cwd=kw['cwd']=bld.variant_dir + try: + tmp=None + if isinstance(cmd,list)and len(' '.join(cmd))>=8192: + program=cmd[0] + cmd=[self.quote_response_command(x)for x in cmd] + (fd,tmp)=tempfile.mkstemp() + os.write(fd,'\r\n'.join(i.replace('\\','\\\\')for i in cmd[1:]).encode()) + os.close(fd) + cmd=[program,'@'+tmp] + ret=self.generator.bld.exec_command(cmd,**kw) + finally: + if tmp: + try: + os.remove(tmp) + except OSError: + pass + return ret + def quote_response_command(self,flag): + if flag.lower()=='/noconfig': + return'' + if flag.find(' ')>-1: + for x in('/r:','/reference:','/resource:','/lib:','/out:'): + if flag.startswith(x): + flag='%s"%s"'%(x,'","'.join(flag[len(x):].split(','))) + break + else: + flag='"%s"'%flag + return flag +def configure(conf): + csc=getattr(Options.options,'cscbinary',None) + if csc: + conf.env.MCS=csc + 
conf.find_program(['csc','mcs','gmcs'],var='MCS') + conf.env.ASS_ST='/r:%s' + conf.env.RES_ST='/resource:%s' + conf.env.CS_NAME='csc' + if str(conf.env.MCS).lower().find('mcs')>-1: + conf.env.CS_NAME='mono' +def options(opt): + opt.add_option('--with-csc-binary',type='string',dest='cscbinary') +class fake_csshlib(Task.Task): + color='YELLOW' + inst_to=None + def runnable_status(self): + for x in self.outputs: + x.sig=Utils.h_file(x.abspath()) + return Task.SKIP_ME +@conf +def read_csshlib(self,name,paths=[]): + return self(name=name,features='fake_lib',lib_paths=paths,lib_type='csshlib') diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/cxx.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/cxx.py new file mode 100644 index 0000000..b744a8d --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/cxx.py @@ -0,0 +1,26 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib import TaskGen,Task,Utils +from waflib.Tools import c_preproc +from waflib.Tools.ccroot import link_task,stlink_task +@TaskGen.extension('.cpp','.cc','.cxx','.C','.c++') +def cxx_hook(self,node): + return self.create_compiled_task('cxx',node) +if not'.c'in TaskGen.task_gen.mappings: + TaskGen.task_gen.mappings['.c']=TaskGen.task_gen.mappings['.cpp'] +class cxx(Task.Task): + run_str='${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT}' + vars=['CXXDEPS'] + ext_in=['.h'] + scan=c_preproc.scan +class cxxprogram(link_task): + run_str='${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB}' + vars=['LINKDEPS'] + ext_out=['.bin'] + inst_to='${BINDIR}' +class cxxshlib(cxxprogram): + inst_to='${LIBDIR}' +class cxxstlib(stlink_task): + pass diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/d.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/d.py new file mode 100644 index 0000000..1838740 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/d.py @@ -0,0 +1,54 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
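The cxx tool above maps .cpp/.cc/.cxx/.C/.c++ sources onto cxx compile tasks and supplies the cxxprogram, cxxshlib and cxxstlib link tasks, while process_use() in ccroot.py (earlier in this diff) propagates libraries, library paths and export_includes between task generators named in 'use'. A hypothetical build() sketch, illustrative only, with all file and target names invented:

    def build(bld):
        bld(features='cxx cxxstlib',
            source='util.cpp',
            target='util',
            export_includes='.')   # consumers of 'util' inherit this include path via process_use()

        bld(features='cxx cxxprogram',
            source='main.cpp',
            target='app',
            use='util')            # appends util to STLIB/STLIBPATH and orders the link after it

The cs tool works along similar lines for C#: a generator with features='cs' collects its .cs sources into an mcs task whose output name comes from the gen attribute (a name ending in .dll selects /target:library, anything else /target:exe).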
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib import Utils,Task,Errors +from waflib.TaskGen import taskgen_method,feature,extension +from waflib.Tools import d_scan,d_config +from waflib.Tools.ccroot import link_task,stlink_task +class d(Task.Task): + color='GREEN' + run_str='${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_SRC_F:SRC} ${D_TGT_F:TGT}' + scan=d_scan.scan +class d_with_header(d): + run_str='${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_HDR_F:tgt.outputs[1].bldpath()} ${D_SRC_F:SRC} ${D_TGT_F:tgt.outputs[0].bldpath()}' +class d_header(Task.Task): + color='BLUE' + run_str='${D} ${D_HEADER} ${SRC}' +class dprogram(link_task): + run_str='${D_LINKER} ${LINKFLAGS} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F:TGT} ${RPATH_ST:RPATH} ${DSTLIB_MARKER} ${DSTLIBPATH_ST:STLIBPATH} ${DSTLIB_ST:STLIB} ${DSHLIB_MARKER} ${DLIBPATH_ST:LIBPATH} ${DSHLIB_ST:LIB}' + inst_to='${BINDIR}' +class dshlib(dprogram): + inst_to='${LIBDIR}' +class dstlib(stlink_task): + pass +@extension('.d','.di','.D') +def d_hook(self,node): + ext=Utils.destos_to_binfmt(self.env.DEST_OS)=='pe'and'obj'or'o' + out='%s.%d.%s'%(node.name,self.idx,ext) + def create_compiled_task(self,name,node): + task=self.create_task(name,node,node.parent.find_or_declare(out)) + try: + self.compiled_tasks.append(task) + except AttributeError: + self.compiled_tasks=[task] + return task + if getattr(self,'generate_headers',None): + tsk=create_compiled_task(self,'d_with_header',node) + tsk.outputs.append(node.change_ext(self.env['DHEADER_ext'])) + else: + tsk=create_compiled_task(self,'d',node) + return tsk +@taskgen_method +def generate_header(self,filename): + try: + self.header_lst.append([filename,self.install_path]) + except AttributeError: + self.header_lst=[[filename,self.install_path]] +@feature('d') +def process_header(self): + for i in getattr(self,'header_lst',[]): + node=self.path.find_resource(i[0]) + if not node: + raise Errors.WafError('file %r not found on d obj'%i[0]) + self.create_task('d_header',node,node.change_ext('.di')) diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/d_config.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/d_config.py new file mode 100644 index 0000000..50660ea --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/d_config.py @@ -0,0 +1,52 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
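The d tool above gives .d/.di/.D sources their own compile task plus dprogram/dshlib/dstlib link tasks, and compiler_d.py (earlier in this diff) selects gdc, dmd or ldc2 at configure time. A hypothetical sketch, illustrative only, with the source and target names invented:

    def options(opt):
        opt.load('compiler_d')     # registers --check-d-compiler; compiler_d.configure() reads it

    def configure(conf):
        conf.load('compiler_d')    # tries the compilers listed by --check-d-compiler (gdc,dmd,ldc2)

    def build(bld):
        bld(features='d dprogram',
            source='main.d',
            target='hello')        # main.d is compiled by the d task and linked by dprogram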
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib import Utils +from waflib.Configure import conf +@conf +def d_platform_flags(self): + v=self.env + if not v.DEST_OS: + v.DEST_OS=Utils.unversioned_sys_platform() + binfmt=Utils.destos_to_binfmt(self.env.DEST_OS) + if binfmt=='pe': + v['dprogram_PATTERN']='%s.exe' + v['dshlib_PATTERN']='lib%s.dll' + v['dstlib_PATTERN']='lib%s.a' + elif binfmt=='mac-o': + v['dprogram_PATTERN']='%s' + v['dshlib_PATTERN']='lib%s.dylib' + v['dstlib_PATTERN']='lib%s.a' + else: + v['dprogram_PATTERN']='%s' + v['dshlib_PATTERN']='lib%s.so' + v['dstlib_PATTERN']='lib%s.a' +DLIB=''' +version(D_Version2) { + import std.stdio; + int main() { + writefln("phobos2"); + return 0; + } +} else { + version(Tango) { + import tango.stdc.stdio; + int main() { + printf("tango"); + return 0; + } + } else { + import std.stdio; + int main() { + writefln("phobos1"); + return 0; + } + } +} +''' +@conf +def check_dlibrary(self,execute=True): + ret=self.check_cc(features='d dprogram',fragment=DLIB,compile_filename='test.d',execute=execute,define_ret=True) + if execute: + self.env.DLIBRARY=ret.strip() diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/d_scan.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/d_scan.py new file mode 100644 index 0000000..ee80c5f --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/d_scan.py @@ -0,0 +1,133 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import re +from waflib import Utils,Logs +def filter_comments(filename): + txt=Utils.readf(filename) + i=0 + buf=[] + max=len(txt) + begin=0 + while i<max: + c=txt[i] + if c=='"'or c=="'": + buf.append(txt[begin:i]) + delim=c + i+=1 + while i<max: + c=txt[i] + if c==delim:break + elif c=='\\': + i+=1 + i+=1 + i+=1 + begin=i + elif c=='/': + buf.append(txt[begin:i]) + i+=1 + if i==max:break + c=txt[i] + if c=='+': + i+=1 + nesting=1 + c=None + while i<max: + prev=c + c=txt[i] + if prev=='/'and c=='+': + nesting+=1 + c=None + elif prev=='+'and c=='/': + nesting-=1 + if nesting==0:break + c=None + i+=1 + elif c=='*': + i+=1 + c=None + while i<max: + prev=c + c=txt[i] + if prev=='*'and c=='/':break + i+=1 + elif c=='/': + i+=1 + while i<max and txt[i]!='\n': + i+=1 + else: + begin=i-1 + continue + i+=1 + begin=i + buf.append(' ') + else: + i+=1 + buf.append(txt[begin:]) + return buf +class d_parser(object): + def __init__(self,env,incpaths): + self.allnames=[] + self.re_module=re.compile("module\s+([^;]+)") + self.re_import=re.compile("import\s+([^;]+)") + self.re_import_bindings=re.compile("([^:]+):(.*)") + self.re_import_alias=re.compile("[^=]+=(.+)") + self.env=env + self.nodes=[] + self.names=[] + self.incpaths=incpaths + def tryfind(self,filename): + found=0 + for n in self.incpaths: + found=n.find_resource(filename.replace('.','/')+'.d') + if found: + self.nodes.append(found) + self.waiting.append(found) + break + if not found: + if not filename in self.names: + self.names.append(filename) + def get_strings(self,code): + self.module='' + lst=[] + mod_name=self.re_module.search(code) + if mod_name: + self.module=re.sub('\s+','',mod_name.group(1)) + import_iterator=self.re_import.finditer(code) + if import_iterator: + for import_match in import_iterator: + import_match_str=re.sub('\s+','',import_match.group(1)) + bindings_match=self.re_import_bindings.match(import_match_str) + if bindings_match: + 
import_match_str=bindings_match.group(1) + matches=import_match_str.split(',') + for match in matches: + alias_match=self.re_import_alias.match(match) + if alias_match: + match=alias_match.group(1) + lst.append(match) + return lst + def start(self,node): + self.waiting=[node] + while self.waiting: + nd=self.waiting.pop(0) + self.iter(nd) + def iter(self,node): + path=node.abspath() + code="".join(filter_comments(path)) + names=self.get_strings(code) + for x in names: + if x in self.allnames:continue + self.allnames.append(x) + self.tryfind(x) +def scan(self): + env=self.env + gruik=d_parser(env,self.generator.includes_nodes) + node=self.inputs[0] + gruik.start(node) + nodes=gruik.nodes + names=gruik.names + if Logs.verbose: + Logs.debug('deps: deps for %s: %r; unresolved %r'%(str(node),nodes,names)) + return(nodes,names) diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/dbus.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/dbus.py new file mode 100644 index 0000000..ccea278 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/dbus.py @@ -0,0 +1,29 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib import Task,Errors +from waflib.TaskGen import taskgen_method,before_method +@taskgen_method +def add_dbus_file(self,filename,prefix,mode): + if not hasattr(self,'dbus_lst'): + self.dbus_lst=[] + if not'process_dbus'in self.meths: + self.meths.append('process_dbus') + self.dbus_lst.append([filename,prefix,mode]) +@before_method('apply_core') +def process_dbus(self): + for filename,prefix,mode in getattr(self,'dbus_lst',[]): + node=self.path.find_resource(filename) + if not node: + raise Errors.WafError('file not found '+filename) + tsk=self.create_task('dbus_binding_tool',node,node.change_ext('.h')) + tsk.env.DBUS_BINDING_TOOL_PREFIX=prefix + tsk.env.DBUS_BINDING_TOOL_MODE=mode +class dbus_binding_tool(Task.Task): + color='BLUE' + ext_out=['.h'] + run_str='${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}' + shell=True +def configure(conf): + dbus_binding_tool=conf.find_program('dbus-binding-tool',var='DBUS_BINDING_TOOL') diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/dmd.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/dmd.py new file mode 100644 index 0000000..b6e3303 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/dmd.py @@ -0,0 +1,51 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
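# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the vendored waf sources: the dbus tool above
# exposes add_dbus_file(filename, prefix, mode) on task generators and runs
# dbus-binding-tool to produce a header from the listed XML file before the C
# sources are compiled. A hedged usage sketch; 'service.xml', 'my_prefix' and
# the file names are placeholders, and any GLib/DBus uselib flags a real
# project needs would come from conf.check_cfg().
def configure(conf):
    conf.load('compiler_c dbus')

def build(bld):
    tg = bld(features='c cprogram', source='main.c', target='app')
    # generates service.h via dbus-binding-tool before main.c is compiled
    tg.add_dbus_file('service.xml', 'my_prefix', 'glib-server')
# ---------------------------------------------------------------------------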
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import sys +from waflib.Tools import ar,d +from waflib.Configure import conf +@conf +def find_dmd(conf): + conf.find_program(['dmd','dmd2','ldc'],var='D') + out=conf.cmd_and_log([conf.env.D,'--help']) + if out.find("D Compiler v")==-1: + out=conf.cmd_and_log([conf.env.D,'-version']) + if out.find("based on DMD v1.")==-1: + conf.fatal("detected compiler is not dmd/ldc") +@conf +def common_flags_ldc(conf): + v=conf.env + v['DFLAGS']=['-d-version=Posix'] + v['LINKFLAGS']=[] + v['DFLAGS_dshlib']=['-relocation-model=pic'] +@conf +def common_flags_dmd(conf): + v=conf.env + v['D_SRC_F']=['-c'] + v['D_TGT_F']='-of%s' + v['D_LINKER']=v['D'] + v['DLNK_SRC_F']='' + v['DLNK_TGT_F']='-of%s' + v['DINC_ST']='-I%s' + v['DSHLIB_MARKER']=v['DSTLIB_MARKER']='' + v['DSTLIB_ST']=v['DSHLIB_ST']='-L-l%s' + v['DSTLIBPATH_ST']=v['DLIBPATH_ST']='-L-L%s' + v['LINKFLAGS_dprogram']=['-quiet'] + v['DFLAGS_dshlib']=['-fPIC'] + v['LINKFLAGS_dshlib']=['-L-shared'] + v['DHEADER_ext']='.di' + v.DFLAGS_d_with_header=['-H','-Hf'] + v['D_HDR_F']='%s' +def configure(conf): + conf.find_dmd() + if sys.platform=='win32': + out=conf.cmd_and_log([conf.env.D,'--help']) + if out.find("D Compiler v2.")>-1: + conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead') + conf.load('ar') + conf.load('d') + conf.common_flags_dmd() + conf.d_platform_flags() + if str(conf.env.D).find('ldc')>-1: + conf.common_flags_ldc() diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/errcheck.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/errcheck.py new file mode 100644 index 0000000..3b06493 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/errcheck.py @@ -0,0 +1,161 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +typos={'feature':'features','sources':'source','targets':'target','include':'includes','export_include':'export_includes','define':'defines','importpath':'includes','installpath':'install_path','iscopy':'is_copy',} +meths_typos=['__call__','program','shlib','stlib','objects'] +from waflib import Logs,Build,Node,Task,TaskGen,ConfigSet,Errors,Utils +import waflib.Tools.ccroot +def check_same_targets(self): + mp=Utils.defaultdict(list) + uids={} + def check_task(tsk): + if not isinstance(tsk,Task.Task): + return + for node in tsk.outputs: + mp[node].append(tsk) + try: + uids[tsk.uid()].append(tsk) + except KeyError: + uids[tsk.uid()]=[tsk] + for g in self.groups: + for tg in g: + try: + for tsk in tg.tasks: + check_task(tsk) + except AttributeError: + check_task(tg) + dupe=False + for(k,v)in mp.items(): + if len(v)>1: + dupe=True + msg='* Node %r is created more than once%s. The task generators are:'%(k,Logs.verbose==1 and" (full message on 'waf -v -v')"or"") + Logs.error(msg) + for x in v: + if Logs.verbose>1: + Logs.error(' %d. %r'%(1+v.index(x),x.generator)) + else: + Logs.error(' %d. %r in %r'%(1+v.index(x),x.generator.name,getattr(x.generator,'path',None))) + if not dupe: + for(k,v)in uids.items(): + if len(v)>1: + Logs.error('* Several tasks use the same identifier. 
Please check the information on\n http://docs.waf.googlecode.com/git/apidocs_16/Task.html#waflib.Task.Task.uid') + for tsk in v: + Logs.error(' - object %r (%r) defined in %r'%(tsk.__class__.__name__,tsk,tsk.generator)) +def check_invalid_constraints(self): + feat=set([]) + for x in list(TaskGen.feats.values()): + feat.union(set(x)) + for(x,y)in TaskGen.task_gen.prec.items(): + feat.add(x) + feat.union(set(y)) + ext=set([]) + for x in TaskGen.task_gen.mappings.values(): + ext.add(x.__name__) + invalid=ext&feat + if invalid: + Logs.error('The methods %r have invalid annotations: @extension <-> @feature/@before_method/@after_method'%list(invalid)) + for cls in list(Task.classes.values()): + for x in('before','after'): + for y in Utils.to_list(getattr(cls,x,[])): + if not Task.classes.get(y,None): + Logs.error('Erroneous order constraint %r=%r on task class %r'%(x,y,cls.__name__)) + if getattr(cls,'rule',None): + Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")'%cls.__name__) +def replace(m): + oldcall=getattr(Build.BuildContext,m) + def call(self,*k,**kw): + ret=oldcall(self,*k,**kw) + for x in typos: + if x in kw: + if x=='iscopy'and'subst'in getattr(self,'features',''): + continue + err=True + Logs.error('Fix the typo %r -> %r on %r'%(x,typos[x],ret)) + return ret + setattr(Build.BuildContext,m,call) +def enhance_lib(): + for m in meths_typos: + replace(m) + def ant_glob(self,*k,**kw): + if k: + lst=Utils.to_list(k[0]) + for pat in lst: + if'..'in pat.split('/'): + Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'"%k[0]) + if kw.get('remove',True): + try: + if self.is_child_of(self.ctx.bldnode)and not kw.get('quiet',False): + Logs.error('Using ant_glob on the build folder (%r) is dangerous (quiet=True to disable this warning)'%self) + except AttributeError: + pass + return self.old_ant_glob(*k,**kw) + Node.Node.old_ant_glob=Node.Node.ant_glob + Node.Node.ant_glob=ant_glob + old=Task.is_before + def is_before(t1,t2): + ret=old(t1,t2) + if ret and old(t2,t1): + Logs.error('Contradictory order constraints in classes %r %r'%(t1,t2)) + return ret + Task.is_before=is_before + def check_err_features(self): + lst=self.to_list(self.features) + if'shlib'in lst: + Logs.error('feature shlib -> cshlib, dshlib or cxxshlib') + for x in('c','cxx','d','fc'): + if not x in lst and lst and lst[0]in[x+y for y in('program','shlib','stlib')]: + Logs.error('%r features is probably missing %r'%(self,x)) + TaskGen.feature('*')(check_err_features) + def check_err_order(self): + if not hasattr(self,'rule')and not'subst'in Utils.to_list(self.features): + for x in('before','after','ext_in','ext_out'): + if hasattr(self,x): + Logs.warn('Erroneous order constraint %r on non-rule based task generator %r'%(x,self)) + else: + for x in('before','after'): + for y in self.to_list(getattr(self,x,[])): + if not Task.classes.get(y,None): + Logs.error('Erroneous order constraint %s=%r on %r (no such class)'%(x,y,self)) + TaskGen.feature('*')(check_err_order) + def check_compile(self): + check_invalid_constraints(self) + try: + ret=self.orig_compile() + finally: + check_same_targets(self) + return ret + Build.BuildContext.orig_compile=Build.BuildContext.compile + Build.BuildContext.compile=check_compile + def use_rec(self,name,**kw): + try: + y=self.bld.get_tgen_by_name(name) + except Errors.WafError: + pass + else: + idx=self.bld.get_group_idx(self) + odx=self.bld.get_group_idx(y) + if odx>idx: + msg="Invalid 'use' across build groups:" + if Logs.verbose>1: + msg+='\n 
target %r\n uses:\n %r'%(self,y) + else: + msg+=" %r uses %r (try 'waf -v -v' for the full error)"%(self.name,name) + raise Errors.WafError(msg) + self.orig_use_rec(name,**kw) + TaskGen.task_gen.orig_use_rec=TaskGen.task_gen.use_rec + TaskGen.task_gen.use_rec=use_rec + def getattri(self,name,default=None): + if name=='append'or name=='add': + raise Errors.WafError('env.append and env.add do not exist: use env.append_value/env.append_unique') + elif name=='prepend': + raise Errors.WafError('env.prepend does not exist: use env.prepend_value') + if name in self.__slots__: + return object.__getattr__(self,name,default) + else: + return self[name] + ConfigSet.ConfigSet.__getattr__=getattri +def options(opt): + enhance_lib() +def configure(conf): + pass diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/fc.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/fc.py new file mode 100644 index 0000000..c882425 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/fc.py @@ -0,0 +1,116 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import re +from waflib import Utils,Task,TaskGen,Logs +from waflib.Tools import ccroot,fc_config,fc_scan +from waflib.TaskGen import feature,before_method,after_method,extension +from waflib.Configure import conf +ccroot.USELIB_VARS['fc']=set(['FCFLAGS','DEFINES','INCLUDES']) +ccroot.USELIB_VARS['fcprogram_test']=ccroot.USELIB_VARS['fcprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) +ccroot.USELIB_VARS['fcshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) +ccroot.USELIB_VARS['fcstlib']=set(['ARFLAGS','LINKDEPS']) +@feature('fcprogram','fcshlib','fcstlib','fcprogram_test') +def dummy(self): + pass +@extension('.f','.f90','.F','.F90','.for','.FOR') +def fc_hook(self,node): + return self.create_compiled_task('fc',node) +@conf +def modfile(conf,name): + return{'lower':name.lower()+'.mod','lower.MOD':name.upper()+'.MOD','UPPER.mod':name.upper()+'.mod','UPPER':name.upper()+'.MOD'}[conf.env.FC_MOD_CAPITALIZATION or'lower'] +def get_fortran_tasks(tsk): + bld=tsk.generator.bld + tasks=bld.get_tasks_group(bld.get_group_idx(tsk.generator)) + return[x for x in tasks if isinstance(x,fc)and not getattr(x,'nomod',None)and not getattr(x,'mod_fortran_done',None)] +class fc(Task.Task): + color='GREEN' + run_str='${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()}' + vars=["FORTRANMODPATHFLAG"] + def scan(self): + tmp=fc_scan.fortran_parser(self.generator.includes_nodes) + tmp.task=self + tmp.start(self.inputs[0]) + if Logs.verbose: + Logs.debug('deps: deps for %r: %r; unresolved %r'%(self.inputs,tmp.nodes,tmp.names)) + return(tmp.nodes,tmp.names) + def runnable_status(self): + if getattr(self,'mod_fortran_done',None): + return super(fc,self).runnable_status() + bld=self.generator.bld + lst=get_fortran_tasks(self) + for tsk in lst: + tsk.mod_fortran_done=True + for tsk in lst: + ret=tsk.runnable_status() + if ret==Task.ASK_LATER: + for x in lst: + x.mod_fortran_done=None + return Task.ASK_LATER + ins=Utils.defaultdict(set) + outs=Utils.defaultdict(set) + for tsk in lst: + key=tsk.uid() + for x in bld.raw_deps[key]: + if x.startswith('MOD@'): + name=bld.modfile(x.replace('MOD@','')) + node=bld.srcnode.find_or_declare(name) + tsk.set_outputs(node) + outs[id(node)].add(tsk) + 
for tsk in lst: + key=tsk.uid() + for x in bld.raw_deps[key]: + if x.startswith('USE@'): + name=bld.modfile(x.replace('USE@','')) + node=bld.srcnode.find_resource(name) + if node and node not in tsk.outputs: + if not node in bld.node_deps[key]: + bld.node_deps[key].append(node) + ins[id(node)].add(tsk) + for k in ins.keys(): + for a in ins[k]: + a.run_after.update(outs[k]) + tmp=[] + for t in outs[k]: + tmp.extend(t.outputs) + a.dep_nodes.extend(tmp) + a.dep_nodes.sort(key=lambda x:x.abspath()) + for tsk in lst: + try: + delattr(tsk,'cache_sig') + except AttributeError: + pass + return super(fc,self).runnable_status() +class fcprogram(ccroot.link_task): + color='YELLOW' + run_str='${FC} ${LINKFLAGS} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB}' + inst_to='${BINDIR}' +class fcshlib(fcprogram): + inst_to='${LIBDIR}' +class fcprogram_test(fcprogram): + def can_retrieve_cache(self): + return False + def runnable_status(self): + ret=super(fcprogram_test,self).runnable_status() + if ret==Task.SKIP_ME: + ret=Task.RUN_ME + return ret + def exec_command(self,cmd,**kw): + bld=self.generator.bld + kw['shell']=isinstance(cmd,str) + kw['stdout']=kw['stderr']=Utils.subprocess.PIPE + kw['cwd']=bld.variant_dir + bld.out=bld.err='' + bld.to_log('command: %s\n'%cmd) + kw['output']=0 + try: + (bld.out,bld.err)=bld.cmd_and_log(cmd,**kw) + except Exception as e: + return-1 + if bld.out: + bld.to_log("out: %s\n"%bld.out) + if bld.err: + bld.to_log("err: %s\n"%bld.err) +class fcstlib(ccroot.stlink_task): + pass diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/fc_config.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/fc_config.py new file mode 100644 index 0000000..bb384ec --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/fc_config.py @@ -0,0 +1,285 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
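# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the vendored waf sources: the fc tool above
# hooks .f/.f90/.F/.F90 sources to the 'fc' task, resolves Fortran module
# dependencies through the MOD@/USE@ markers, and provides the
# fcprogram/fcshlib/fcstlib link tasks; fc_config.py below adds the related
# configuration checks. A minimal Fortran wscript might look like this;
# 'hello.f90' and 'hello' are placeholders, and a compiler detectable by
# compiler_fc (for example gfortran) is assumed.
def options(opt):
    opt.load('compiler_fc')

def configure(conf):
    conf.load('compiler_fc')
    conf.check_fortran()   # builds the FC_FRAGMENT defined in fc_config.py

def build(bld):
    bld(features='fc fcprogram', source='hello.f90', target='hello')
# ---------------------------------------------------------------------------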
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import re,shutil,os,sys,string,shlex +from waflib.Configure import conf +from waflib.TaskGen import feature,after_method,before_method +from waflib import Build,Utils +FC_FRAGMENT=' program main\n end program main\n' +FC_FRAGMENT2=' PROGRAM MAIN\n END\n' +@conf +def fc_flags(conf): + v=conf.env + v['FC_SRC_F']=[] + v['FC_TGT_F']=['-c','-o'] + v['FCINCPATH_ST']='-I%s' + v['FCDEFINES_ST']='-D%s' + if not v['LINK_FC']:v['LINK_FC']=v['FC'] + v['FCLNK_SRC_F']=[] + v['FCLNK_TGT_F']=['-o'] + v['FCFLAGS_fcshlib']=['-fpic'] + v['LINKFLAGS_fcshlib']=['-shared'] + v['fcshlib_PATTERN']='lib%s.so' + v['fcstlib_PATTERN']='lib%s.a' + v['FCLIB_ST']='-l%s' + v['FCLIBPATH_ST']='-L%s' + v['FCSTLIB_ST']='-l%s' + v['FCSTLIBPATH_ST']='-L%s' + v['FCSTLIB_MARKER']='-Wl,-Bstatic' + v['FCSHLIB_MARKER']='-Wl,-Bdynamic' + v['SONAME_ST']='-Wl,-h,%s' +@conf +def fc_add_flags(conf): + conf.add_os_flags('FCFLAGS') + conf.add_os_flags('LDFLAGS','LINKFLAGS') +@conf +def check_fortran(self,*k,**kw): + self.check_cc(fragment=FC_FRAGMENT,compile_filename='test.f',features='fc fcprogram',msg='Compiling a simple fortran app') +@conf +def check_fc(self,*k,**kw): + kw['compiler']='fc' + if not'compile_mode'in kw: + kw['compile_mode']='fc' + if not'type'in kw: + kw['type']='fcprogram' + if not'compile_filename'in kw: + kw['compile_filename']='test.f90' + if not'code'in kw: + kw['code']=FC_FRAGMENT + return self.check(*k,**kw) +@conf +def fortran_modifier_darwin(conf): + v=conf.env + v['FCFLAGS_fcshlib']=['-fPIC'] + v['LINKFLAGS_fcshlib']=['-dynamiclib','-Wl,-compatibility_version,1','-Wl,-current_version,1'] + v['fcshlib_PATTERN']='lib%s.dylib' + v['FRAMEWORKPATH_ST']='-F%s' + v['FRAMEWORK_ST']='-framework %s' + v['LINKFLAGS_fcstlib']=[] + v['FCSHLIB_MARKER']='' + v['FCSTLIB_MARKER']='' + v['SONAME_ST']='' +@conf +def fortran_modifier_win32(conf): + v=conf.env + v['fcprogram_PATTERN']=v['fcprogram_test_PATTERN']='%s.exe' + v['fcshlib_PATTERN']='%s.dll' + v['implib_PATTERN']='lib%s.dll.a' + v['IMPLIB_ST']='-Wl,--out-implib,%s' + v['FCFLAGS_fcshlib']=[] + v.append_value('FCFLAGS_fcshlib',['-DDLL_EXPORT']) + v.append_value('LINKFLAGS',['-Wl,--enable-auto-import']) +@conf +def fortran_modifier_cygwin(conf): + fortran_modifier_win32(conf) + v=conf.env + v['fcshlib_PATTERN']='cyg%s.dll' + v.append_value('LINKFLAGS_fcshlib',['-Wl,--enable-auto-image-base']) + v['FCFLAGS_fcshlib']=[] +@conf +def check_fortran_dummy_main(self,*k,**kw): + if not self.env.CC: + self.fatal('A c compiler is required for check_fortran_dummy_main') + lst=['MAIN__','__MAIN','_MAIN','MAIN_','MAIN'] + lst.extend([m.lower()for m in lst]) + lst.append('') + self.start_msg('Detecting whether we need a dummy main') + for main in lst: + kw['fortran_main']=main + try: + self.check_cc(fragment='int %s() { return 0; }\n'%(main or'test'),features='c fcprogram',mandatory=True) + if not main: + self.env.FC_MAIN=-1 + self.end_msg('no') + else: + self.env.FC_MAIN=main + self.end_msg('yes %s'%main) + break + except self.errors.ConfigurationError: + pass + else: + self.end_msg('not found') + self.fatal('could not detect whether fortran requires a dummy main, see the config.log') +GCC_DRIVER_LINE=re.compile('^Driving:') +POSIX_STATIC_EXT=re.compile('\S+\.a') +POSIX_LIB_FLAGS=re.compile('-l\S+') +@conf +def is_link_verbose(self,txt): + assert isinstance(txt,str) + for line in txt.splitlines(): + if not GCC_DRIVER_LINE.search(line): + if POSIX_STATIC_EXT.search(line)or POSIX_LIB_FLAGS.search(line): + 
return True + return False +@conf +def check_fortran_verbose_flag(self,*k,**kw): + self.start_msg('fortran link verbose flag') + for x in['-v','--verbose','-verbose','-V']: + try: + self.check_cc(features='fc fcprogram_test',fragment=FC_FRAGMENT2,compile_filename='test.f',linkflags=[x],mandatory=True) + except self.errors.ConfigurationError: + pass + else: + if self.is_link_verbose(self.test_bld.err)or self.is_link_verbose(self.test_bld.out): + self.end_msg(x) + break + else: + self.end_msg('failure') + self.fatal('Could not obtain the fortran link verbose flag (see config.log)') + self.env.FC_VERBOSE_FLAG=x + return x +LINKFLAGS_IGNORED=[r'-lang*',r'-lcrt[a-zA-Z0-9\.]*\.o',r'-lc$',r'-lSystem',r'-libmil',r'-LIST:*',r'-LNO:*'] +if os.name=='nt': + LINKFLAGS_IGNORED.extend([r'-lfrt*',r'-luser32',r'-lkernel32',r'-ladvapi32',r'-lmsvcrt',r'-lshell32',r'-lmingw',r'-lmoldname']) +else: + LINKFLAGS_IGNORED.append(r'-lgcc*') +RLINKFLAGS_IGNORED=[re.compile(f)for f in LINKFLAGS_IGNORED] +def _match_ignore(line): + for i in RLINKFLAGS_IGNORED: + if i.match(line): + return True + return False +def parse_fortran_link(lines): + final_flags=[] + for line in lines: + if not GCC_DRIVER_LINE.match(line): + _parse_flink_line(line,final_flags) + return final_flags +SPACE_OPTS=re.compile('^-[LRuYz]$') +NOSPACE_OPTS=re.compile('^-[RL]') +def _parse_flink_line(line,final_flags): + lexer=shlex.shlex(line,posix=True) + lexer.whitespace_split=True + t=lexer.get_token() + tmp_flags=[] + while t: + def parse(token): + if _match_ignore(token): + pass + elif token.startswith('-lkernel32')and sys.platform=='cygwin': + tmp_flags.append(token) + elif SPACE_OPTS.match(token): + t=lexer.get_token() + if t.startswith('P,'): + t=t[2:] + for opt in t.split(os.pathsep): + tmp_flags.append('-L%s'%opt) + elif NOSPACE_OPTS.match(token): + tmp_flags.append(token) + elif POSIX_LIB_FLAGS.match(token): + tmp_flags.append(token) + else: + pass + t=lexer.get_token() + return t + t=parse(t) + final_flags.extend(tmp_flags) + return final_flags +@conf +def check_fortran_clib(self,autoadd=True,*k,**kw): + if not self.env.FC_VERBOSE_FLAG: + self.fatal('env.FC_VERBOSE_FLAG is not set: execute check_fortran_verbose_flag?') + self.start_msg('Getting fortran runtime link flags') + try: + self.check_cc(fragment=FC_FRAGMENT2,compile_filename='test.f',features='fc fcprogram_test',linkflags=[self.env.FC_VERBOSE_FLAG]) + except Exception: + self.end_msg(False) + if kw.get('mandatory',True): + conf.fatal('Could not find the c library flags') + else: + out=self.test_bld.err + flags=parse_fortran_link(out.splitlines()) + self.end_msg('ok (%s)'%' '.join(flags)) + self.env.LINKFLAGS_CLIB=flags + return flags + return[] +def getoutput(conf,cmd,stdin=False): + if stdin: + stdin=Utils.subprocess.PIPE + else: + stdin=None + env=conf.env.env or None + try: + p=Utils.subprocess.Popen(cmd,stdin=stdin,stdout=Utils.subprocess.PIPE,stderr=Utils.subprocess.PIPE,env=env) + if stdin: + p.stdin.write('\n'.encode()) + out,err=p.communicate() + except Exception: + conf.fatal('could not determine the compiler version %r'%cmd) + if not isinstance(out,str): + out=out.decode(sys.stdout.encoding or'iso8859-1') + if not isinstance(err,str): + err=err.decode(sys.stdout.encoding or'iso8859-1') + return(out,err) +ROUTINES_CODE="""\ + subroutine foobar() + return + end + subroutine foo_bar() + return + end +""" +MAIN_CODE=""" +void %(dummy_func_nounder)s(void); +void %(dummy_func_under)s(void); +int %(main_func_name)s() { + %(dummy_func_nounder)s(); + %(dummy_func_under)s(); + 
return 0; +} +""" +@feature('link_main_routines_func') +@before_method('process_source') +def link_main_routines_tg_method(self): + def write_test_file(task): + task.outputs[0].write(task.generator.code) + bld=self.bld + bld(rule=write_test_file,target='main.c',code=MAIN_CODE%self.__dict__) + bld(rule=write_test_file,target='test.f',code=ROUTINES_CODE) + bld(features='fc fcstlib',source='test.f',target='test') + bld(features='c fcprogram',source='main.c',target='app',use='test') +def mangling_schemes(): + for u in['_','']: + for du in['','_']: + for c in["lower","upper"]: + yield(u,du,c) +def mangle_name(u,du,c,name): + return getattr(name,c)()+u+(name.find('_')!=-1 and du or'') +@conf +def check_fortran_mangling(self,*k,**kw): + if not self.env.CC: + self.fatal('A c compiler is required for link_main_routines') + if not self.env.FC: + self.fatal('A fortran compiler is required for link_main_routines') + if not self.env.FC_MAIN: + self.fatal('Checking for mangling requires self.env.FC_MAIN (execute "check_fortran_dummy_main" first?)') + self.start_msg('Getting fortran mangling scheme') + for(u,du,c)in mangling_schemes(): + try: + self.check_cc(compile_filename=[],features='link_main_routines_func',msg='nomsg',errmsg='nomsg',mandatory=True,dummy_func_nounder=mangle_name(u,du,c,"foobar"),dummy_func_under=mangle_name(u,du,c,"foo_bar"),main_func_name=self.env.FC_MAIN) + except self.errors.ConfigurationError: + pass + else: + self.end_msg("ok ('%s', '%s', '%s-case')"%(u,du,c)) + self.env.FORTRAN_MANGLING=(u,du,c) + break + else: + self.end_msg(False) + self.fatal('mangler not found') + return(u,du,c) +@feature('pyext') +@before_method('propagate_uselib_vars','apply_link') +def set_lib_pat(self): + self.env['fcshlib_PATTERN']=self.env['pyext_PATTERN'] +@conf +def detect_openmp(self): + for x in['-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp']: + try: + self.check_fc(msg='Checking for OpenMP flag %s'%x,fragment='program main\n call omp_get_num_threads()\nend program main',fcflags=x,linkflags=x,uselib_store='OPENMP') + except self.errors.ConfigurationError: + pass + else: + break + else: + self.fatal('Could not find OpenMP') diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/fc_scan.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/fc_scan.py new file mode 100644 index 0000000..e4e2344 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/fc_scan.py @@ -0,0 +1,68 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import re +from waflib import Utils,Task,TaskGen,Logs +from waflib.TaskGen import feature,before_method,after_method,extension +from waflib.Configure import conf +INC_REGEX="""(?:^|['">]\s*;)\s*(?:|#\s*)INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])""" +USE_REGEX="""(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)""" +MOD_REGEX="""(?:^|;)\s*MODULE(?!\s*PROCEDURE)(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)""" +re_inc=re.compile(INC_REGEX,re.I) +re_use=re.compile(USE_REGEX,re.I) +re_mod=re.compile(MOD_REGEX,re.I) +class fortran_parser(object): + def __init__(self,incpaths): + self.seen=[] + self.nodes=[] + self.names=[] + self.incpaths=incpaths + def find_deps(self,node): + txt=node.read() + incs=[] + uses=[] + mods=[] + for line in txt.splitlines(): + m=re_inc.search(line) + if m: + incs.append(m.group(1)) + m=re_use.search(line) + if m: + uses.append(m.group(1)) + m=re_mod.search(line) + if m: + mods.append(m.group(1)) + return(incs,uses,mods) + def start(self,node): + self.waiting=[node] + while self.waiting: + nd=self.waiting.pop(0) + self.iter(nd) + def iter(self,node): + path=node.abspath() + incs,uses,mods=self.find_deps(node) + for x in incs: + if x in self.seen: + continue + self.seen.append(x) + self.tryfind_header(x) + for x in uses: + name="USE@%s"%x + if not name in self.names: + self.names.append(name) + for x in mods: + name="MOD@%s"%x + if not name in self.names: + self.names.append(name) + def tryfind_header(self,filename): + found=None + for n in self.incpaths: + found=n.find_resource(filename) + if found: + self.nodes.append(found) + self.waiting.append(found) + break + if not found: + if not filename in self.names: + self.names.append(filename) diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/flex.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/flex.py new file mode 100644 index 0000000..13f6207 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/flex.py @@ -0,0 +1,32 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import waflib.TaskGen,os,re +def decide_ext(self,node): + if'cxx'in self.features: + return['.lex.cc'] + return['.lex.c'] +def flexfun(tsk): + env=tsk.env + bld=tsk.generator.bld + wd=bld.variant_dir + def to_list(xx): + if isinstance(xx,str):return[xx] + return xx + tsk.last_cmd=lst=[] + lst.extend(to_list(env['FLEX'])) + lst.extend(to_list(env['FLEXFLAGS'])) + inputs=[a.path_from(bld.bldnode)for a in tsk.inputs] + if env.FLEX_MSYS: + inputs=[x.replace(os.sep,'/')for x in inputs] + lst.extend(inputs) + lst=[x for x in lst if x] + txt=bld.cmd_and_log(lst,cwd=wd,env=env.env or None,quiet=0) + tsk.outputs[0].write(txt.replace('\r\n','\n').replace('\r','\n')) +waflib.TaskGen.declare_chain(name='flex',rule=flexfun,ext_in='.l',decider=decide_ext,) +def configure(conf): + conf.find_program('flex',var='FLEX') + conf.env.FLEXFLAGS=['-t'] + if re.search(r"\\msys\\[0-9.]+\\bin\\flex.exe$",conf.env.FLEX): + conf.env.FLEX_MSYS=True diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/g95.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/g95.py new file mode 100644 index 0000000..9bc331a --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/g95.py @@ -0,0 +1,55 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
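# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the vendored waf sources: the flex tool above
# declares a chain rule that turns .l files into .lex.c (or .lex.cc when the
# task generator has the 'cxx' feature) before normal C/C++ compilation. A
# hedged usage sketch; 'scanner.l' and 'main.c' are placeholders, and main.c is
# assumed to supply main()/yywrap() or to link against libfl.
def configure(conf):
    conf.load('compiler_c flex')

def build(bld):
    # scanner.l is run through flex first; the generated .lex.c is compiled in
    bld(features='c cprogram', source='scanner.l main.c', target='scanner')
# ---------------------------------------------------------------------------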
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import re +from waflib import Utils +from waflib.Tools import fc,fc_config,fc_scan,ar +from waflib.Configure import conf +@conf +def find_g95(conf): + fc=conf.find_program('g95',var='FC') + fc=conf.cmd_to_list(fc) + conf.get_g95_version(fc) + conf.env.FC_NAME='G95' +@conf +def g95_flags(conf): + v=conf.env + v['FCFLAGS_fcshlib']=['-fPIC'] + v['FORTRANMODFLAG']=['-fmod=',''] + v['FCFLAGS_DEBUG']=['-Werror'] +@conf +def g95_modifier_win32(conf): + fc_config.fortran_modifier_win32(conf) +@conf +def g95_modifier_cygwin(conf): + fc_config.fortran_modifier_cygwin(conf) +@conf +def g95_modifier_darwin(conf): + fc_config.fortran_modifier_darwin(conf) +@conf +def g95_modifier_platform(conf): + dest_os=conf.env['DEST_OS']or Utils.unversioned_sys_platform() + g95_modifier_func=getattr(conf,'g95_modifier_'+dest_os,None) + if g95_modifier_func: + g95_modifier_func() +@conf +def get_g95_version(conf,fc): + version_re=re.compile(r"g95\s*(?P<major>\d*)\.(?P<minor>\d*)").search + cmd=fc+['--version'] + out,err=fc_config.getoutput(conf,cmd,stdin=False) + if out: + match=version_re(out) + else: + match=version_re(err) + if not match: + conf.fatal('cannot determine g95 version') + k=match.groupdict() + conf.env['FC_VERSION']=(k['major'],k['minor']) +def configure(conf): + conf.find_g95() + conf.find_ar() + conf.fc_flags() + conf.fc_add_flags() + conf.g95_flags() + conf.g95_modifier_platform() diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gas.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gas.py new file mode 100644 index 0000000..b714ca1 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gas.py @@ -0,0 +1,12 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import waflib.Tools.asm +from waflib.Tools import ar +def configure(conf): + conf.find_program(['gas','gcc'],var='AS') + conf.env.AS_TGT_F=['-c','-o'] + conf.env.ASLNK_TGT_F=['-o'] + conf.find_ar() + conf.load('asm') diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gcc.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gcc.py new file mode 100644 index 0000000..aca49ca --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gcc.py @@ -0,0 +1,98 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_gcc(conf): + cc=conf.find_program(['gcc','cc'],var='CC') + cc=conf.cmd_to_list(cc) + conf.get_cc_version(cc,gcc=True) + conf.env.CC_NAME='gcc' + conf.env.CC=cc +@conf +def gcc_common_flags(conf): + v=conf.env + v['CC_SRC_F']=[] + v['CC_TGT_F']=['-c','-o'] + if not v['LINK_CC']:v['LINK_CC']=v['CC'] + v['CCLNK_SRC_F']=[] + v['CCLNK_TGT_F']=['-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['RPATH_ST']='-Wl,-rpath,%s' + v['SONAME_ST']='-Wl,-h,%s' + v['SHLIB_MARKER']='-Wl,-Bdynamic' + v['STLIB_MARKER']='-Wl,-Bstatic' + v['cprogram_PATTERN']='%s' + v['CFLAGS_cshlib']=['-fPIC'] + v['LINKFLAGS_cshlib']=['-shared'] + v['cshlib_PATTERN']='lib%s.so' + v['LINKFLAGS_cstlib']=['-Wl,-Bstatic'] + v['cstlib_PATTERN']='lib%s.a' + v['LINKFLAGS_MACBUNDLE']=['-bundle','-undefined','dynamic_lookup'] + v['CFLAGS_MACBUNDLE']=['-fPIC'] + v['macbundle_PATTERN']='%s.bundle' +@conf +def gcc_modifier_win32(conf): + v=conf.env + v['cprogram_PATTERN']='%s.exe' + v['cshlib_PATTERN']='%s.dll' + v['implib_PATTERN']='lib%s.dll.a' + v['IMPLIB_ST']='-Wl,--out-implib,%s' + v['CFLAGS_cshlib']=[] + v.append_value('LINKFLAGS',['-Wl,--enable-auto-import']) +@conf +def gcc_modifier_cygwin(conf): + gcc_modifier_win32(conf) + v=conf.env + v['cshlib_PATTERN']='cyg%s.dll' + v.append_value('LINKFLAGS_cshlib',['-Wl,--enable-auto-image-base']) + v['CFLAGS_cshlib']=[] +@conf +def gcc_modifier_darwin(conf): + v=conf.env + v['CFLAGS_cshlib']=['-fPIC'] + v['LINKFLAGS_cshlib']=['-dynamiclib','-Wl,-compatibility_version,1','-Wl,-current_version,1'] + v['cshlib_PATTERN']='lib%s.dylib' + v['FRAMEWORKPATH_ST']='-F%s' + v['FRAMEWORK_ST']=['-framework'] + v['ARCH_ST']=['-arch'] + v['LINKFLAGS_cstlib']=[] + v['SHLIB_MARKER']=[] + v['STLIB_MARKER']=[] + v['SONAME_ST']=[] +@conf +def gcc_modifier_aix(conf): + v=conf.env + v['LINKFLAGS_cprogram']=['-Wl,-brtl'] + v['LINKFLAGS_cshlib']=['-shared','-Wl,-brtl,-bexpfull'] + v['SHLIB_MARKER']=[] +@conf +def gcc_modifier_hpux(conf): + v=conf.env + v['SHLIB_MARKER']=[] + v['STLIB_MARKER']='-Bstatic' + v['CFLAGS_cshlib']=['-fPIC','-DPIC'] + v['cshlib_PATTERN']='lib%s.sl' +@conf +def gcc_modifier_openbsd(conf): + conf.env.SONAME_ST=[] +@conf +def gcc_modifier_platform(conf): + gcc_modifier_func=getattr(conf,'gcc_modifier_'+conf.env.DEST_OS,None) + if gcc_modifier_func: + gcc_modifier_func() +def configure(conf): + conf.find_gcc() + conf.find_ar() + conf.gcc_common_flags() + conf.gcc_modifier_platform() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gdc.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gdc.py new file mode 100644 index 0000000..da966ec --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gdc.py @@ -0,0 +1,36 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import sys +from waflib.Tools import ar,d +from waflib.Configure import conf +@conf +def find_gdc(conf): + conf.find_program('gdc',var='D') + out=conf.cmd_and_log([conf.env.D,'--version']) + if out.find("gdc ")==-1: + conf.fatal("detected compiler is not gdc") +@conf +def common_flags_gdc(conf): + v=conf.env + v['DFLAGS']=[] + v['D_SRC_F']=['-c'] + v['D_TGT_F']='-o%s' + v['D_LINKER']=v['D'] + v['DLNK_SRC_F']='' + v['DLNK_TGT_F']='-o%s' + v['DINC_ST']='-I%s' + v['DSHLIB_MARKER']=v['DSTLIB_MARKER']='' + v['DSTLIB_ST']=v['DSHLIB_ST']='-l%s' + v['DSTLIBPATH_ST']=v['DLIBPATH_ST']='-L%s' + v['LINKFLAGS_dshlib']=['-shared'] + v['DHEADER_ext']='.di' + v.DFLAGS_d_with_header='-fintfc' + v['D_HDR_F']='-fintfc-file=%s' +def configure(conf): + conf.find_gdc() + conf.load('ar') + conf.load('d') + conf.common_flags_gdc() + conf.d_platform_flags() diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gfortran.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gfortran.py new file mode 100644 index 0000000..854a93d --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gfortran.py @@ -0,0 +1,69 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import re +from waflib import Utils +from waflib.Tools import fc,fc_config,fc_scan,ar +from waflib.Configure import conf +@conf +def find_gfortran(conf): + fc=conf.find_program(['gfortran','g77'],var='FC') + fc=conf.cmd_to_list(fc) + conf.get_gfortran_version(fc) + conf.env.FC_NAME='GFORTRAN' +@conf +def gfortran_flags(conf): + v=conf.env + v['FCFLAGS_fcshlib']=['-fPIC'] + v['FORTRANMODFLAG']=['-J',''] + v['FCFLAGS_DEBUG']=['-Werror'] +@conf +def gfortran_modifier_win32(conf): + fc_config.fortran_modifier_win32(conf) +@conf +def gfortran_modifier_cygwin(conf): + fc_config.fortran_modifier_cygwin(conf) +@conf +def gfortran_modifier_darwin(conf): + fc_config.fortran_modifier_darwin(conf) +@conf +def gfortran_modifier_platform(conf): + dest_os=conf.env['DEST_OS']or Utils.unversioned_sys_platform() + gfortran_modifier_func=getattr(conf,'gfortran_modifier_'+dest_os,None) + if gfortran_modifier_func: + gfortran_modifier_func() +@conf +def get_gfortran_version(conf,fc): + version_re=re.compile(r"GNU\s*Fortran",re.I).search + cmd=fc+['--version'] + out,err=fc_config.getoutput(conf,cmd,stdin=False) + if out:match=version_re(out) + else:match=version_re(err) + if not match: + conf.fatal('Could not determine the compiler type') + cmd=fc+['-dM','-E','-'] + out,err=fc_config.getoutput(conf,cmd,stdin=True) + if out.find('__GNUC__')<0: + conf.fatal('Could not determine the compiler type') + k={} + out=out.split('\n') + import shlex + for line in out: + lst=shlex.split(line) + if len(lst)>2: + key=lst[1] + val=lst[2] + k[key]=val + def isD(var): + return var in k + def isT(var): + return var in k and k[var]!='0' + conf.env['FC_VERSION']=(k['__GNUC__'],k['__GNUC_MINOR__'],k['__GNUC_PATCHLEVEL__']) +def configure(conf): + conf.find_gfortran() + conf.find_ar() + conf.fc_flags() + conf.fc_add_flags() + conf.gfortran_flags() + conf.gfortran_modifier_platform() diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/glib2.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/glib2.py new file mode 100644 index 0000000..1d75510 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/glib2.py @@ -0,0 
+1,173 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os +from waflib import Task,Utils,Options,Errors,Logs +from waflib.TaskGen import taskgen_method,before_method,after_method,feature +@taskgen_method +def add_marshal_file(self,filename,prefix): + if not hasattr(self,'marshal_list'): + self.marshal_list=[] + self.meths.append('process_marshal') + self.marshal_list.append((filename,prefix)) +@before_method('process_source') +def process_marshal(self): + for f,prefix in getattr(self,'marshal_list',[]): + node=self.path.find_resource(f) + if not node: + raise Errors.WafError('file not found %r'%f) + h_node=node.change_ext('.h') + c_node=node.change_ext('.c') + task=self.create_task('glib_genmarshal',node,[h_node,c_node]) + task.env.GLIB_GENMARSHAL_PREFIX=prefix + self.source=self.to_nodes(getattr(self,'source',[])) + self.source.append(c_node) +class glib_genmarshal(Task.Task): + def run(self): + bld=self.inputs[0].__class__.ctx + get=self.env.get_flat + cmd1="%s %s --prefix=%s --header > %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[0].abspath()) + ret=bld.exec_command(cmd1) + if ret:return ret + c='''#include "%s"\n'''%self.outputs[0].name + self.outputs[1].write(c) + cmd2="%s %s --prefix=%s --body >> %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[1].abspath()) + return bld.exec_command(cmd2) + vars=['GLIB_GENMARSHAL_PREFIX','GLIB_GENMARSHAL'] + color='BLUE' + ext_out=['.h'] +@taskgen_method +def add_enums_from_template(self,source='',target='',template='',comments=''): + if not hasattr(self,'enums_list'): + self.enums_list=[] + self.meths.append('process_enums') + self.enums_list.append({'source':source,'target':target,'template':template,'file-head':'','file-prod':'','file-tail':'','enum-prod':'','value-head':'','value-prod':'','value-tail':'','comments':comments}) +@taskgen_method +def add_enums(self,source='',target='',file_head='',file_prod='',file_tail='',enum_prod='',value_head='',value_prod='',value_tail='',comments=''): + if not hasattr(self,'enums_list'): + self.enums_list=[] + self.meths.append('process_enums') + self.enums_list.append({'source':source,'template':'','target':target,'file-head':file_head,'file-prod':file_prod,'file-tail':file_tail,'enum-prod':enum_prod,'value-head':value_head,'value-prod':value_prod,'value-tail':value_tail,'comments':comments}) +@before_method('process_source') +def process_enums(self): + for enum in getattr(self,'enums_list',[]): + task=self.create_task('glib_mkenums') + env=task.env + inputs=[] + source_list=self.to_list(enum['source']) + if not source_list: + raise Errors.WafError('missing source '+str(enum)) + source_list=[self.path.find_resource(k)for k in source_list] + inputs+=source_list + env['GLIB_MKENUMS_SOURCE']=[k.abspath()for k in source_list] + if not enum['target']: + raise Errors.WafError('missing target '+str(enum)) + tgt_node=self.path.find_or_declare(enum['target']) + if tgt_node.name.endswith('.c'): + self.source.append(tgt_node) + env['GLIB_MKENUMS_TARGET']=tgt_node.abspath() + options=[] + if enum['template']: + template_node=self.path.find_resource(enum['template']) + options.append('--template %s'%(template_node.abspath())) + inputs.append(template_node) + 
params={'file-head':'--fhead','file-prod':'--fprod','file-tail':'--ftail','enum-prod':'--eprod','value-head':'--vhead','value-prod':'--vprod','value-tail':'--vtail','comments':'--comments'} + for param,option in params.items(): + if enum[param]: + options.append('%s %r'%(option,enum[param])) + env['GLIB_MKENUMS_OPTIONS']=' '.join(options) + task.set_inputs(inputs) + task.set_outputs(tgt_node) +class glib_mkenums(Task.Task): + run_str='${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}' + color='PINK' + ext_out=['.h'] +@taskgen_method +def add_settings_schemas(self,filename_list): + if not hasattr(self,'settings_schema_files'): + self.settings_schema_files=[] + if not isinstance(filename_list,list): + filename_list=[filename_list] + self.settings_schema_files.extend(filename_list) +@taskgen_method +def add_settings_enums(self,namespace,filename_list): + if hasattr(self,'settings_enum_namespace'): + raise Errors.WafError("Tried to add gsettings enums to '%s' more than once"%self.name) + self.settings_enum_namespace=namespace + if type(filename_list)!='list': + filename_list=[filename_list] + self.settings_enum_files=filename_list +def r_change_ext(self,ext): + name=self.name + k=name.rfind('.') + if k>=0: + name=name[:k]+ext + else: + name=name+ext + return self.parent.find_or_declare([name]) +@feature('glib2') +def process_settings(self): + enums_tgt_node=[] + install_files=[] + settings_schema_files=getattr(self,'settings_schema_files',[]) + if settings_schema_files and not self.env['GLIB_COMPILE_SCHEMAS']: + raise Errors.WafError("Unable to process GSettings schemas - glib-compile-schemas was not found during configure") + if hasattr(self,'settings_enum_files'): + enums_task=self.create_task('glib_mkenums') + source_list=self.settings_enum_files + source_list=[self.path.find_resource(k)for k in source_list] + enums_task.set_inputs(source_list) + enums_task.env['GLIB_MKENUMS_SOURCE']=[k.abspath()for k in source_list] + target=self.settings_enum_namespace+'.enums.xml' + tgt_node=self.path.find_or_declare(target) + enums_task.set_outputs(tgt_node) + enums_task.env['GLIB_MKENUMS_TARGET']=tgt_node.abspath() + enums_tgt_node=[tgt_node] + install_files.append(tgt_node) + options='--comments "<!-- @comment@ -->" --fhead "<schemalist>" --vhead " <@type@ id=\\"%s.@EnumName@\\">" --vprod " <value nick=\\"@valuenick@\\" value=\\"@valuenum@\\"/>" --vtail " </@type@>" --ftail "</schemalist>" '%(self.settings_enum_namespace) + enums_task.env['GLIB_MKENUMS_OPTIONS']=options + for schema in settings_schema_files: + schema_task=self.create_task('glib_validate_schema') + schema_node=self.path.find_resource(schema) + if not schema_node: + raise Errors.WafError("Cannot find the schema file '%s'"%schema) + install_files.append(schema_node) + source_list=enums_tgt_node+[schema_node] + schema_task.set_inputs(source_list) + schema_task.env['GLIB_COMPILE_SCHEMAS_OPTIONS']=[("--schema-file="+k.abspath())for k in source_list] + target_node=r_change_ext(schema_node,'.xml.valid') + schema_task.set_outputs(target_node) + schema_task.env['GLIB_VALIDATE_SCHEMA_OUTPUT']=target_node.abspath() + def compile_schemas_callback(bld): + if not bld.is_install:return + Logs.pprint('YELLOW','Updating GSettings schema cache') + command=Utils.subst_vars("${GLIB_COMPILE_SCHEMAS} ${GSETTINGSSCHEMADIR}",bld.env) + ret=self.bld.exec_command(command) + if self.bld.is_install: + if not self.env['GSETTINGSSCHEMADIR']: + raise Errors.WafError('GSETTINGSSCHEMADIR not defined (should have been set up 
automatically during configure)') + if install_files: + self.bld.install_files(self.env['GSETTINGSSCHEMADIR'],install_files) + if not hasattr(self.bld,'_compile_schemas_registered'): + self.bld.add_post_fun(compile_schemas_callback) + self.bld._compile_schemas_registered=True +class glib_validate_schema(Task.Task): + run_str='rm -f ${GLIB_VALIDATE_SCHEMA_OUTPUT} && ${GLIB_COMPILE_SCHEMAS} --dry-run ${GLIB_COMPILE_SCHEMAS_OPTIONS} && touch ${GLIB_VALIDATE_SCHEMA_OUTPUT}' + color='PINK' +def configure(conf): + conf.find_program('glib-genmarshal',var='GLIB_GENMARSHAL') + conf.find_perl_program('glib-mkenums',var='GLIB_MKENUMS') + conf.find_program('glib-compile-schemas',var='GLIB_COMPILE_SCHEMAS',mandatory=False) + def getstr(varname): + return getattr(Options.options,varname,getattr(conf.env,varname,'')) + gsettingsschemadir=getstr('GSETTINGSSCHEMADIR') + if not gsettingsschemadir: + datadir=getstr('DATADIR') + if not datadir: + prefix=conf.env['PREFIX'] + datadir=os.path.join(prefix,'share') + gsettingsschemadir=os.path.join(datadir,'glib-2.0','schemas') + conf.env['GSETTINGSSCHEMADIR']=gsettingsschemadir +def options(opt): + opt.add_option('--gsettingsschemadir',help='GSettings schema location [Default: ${datadir}/glib-2.0/schemas]',default='',dest='GSETTINGSSCHEMADIR') diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gnu_dirs.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gnu_dirs.py new file mode 100644 index 0000000..9c8a304 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gnu_dirs.py @@ -0,0 +1,65 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os +from waflib import Utils,Options,Context +_options=[x.split(', ')for x in''' +bindir, user executables, ${EXEC_PREFIX}/bin +sbindir, system admin executables, ${EXEC_PREFIX}/sbin +libexecdir, program executables, ${EXEC_PREFIX}/libexec +sysconfdir, read-only single-machine data, ${PREFIX}/etc +sharedstatedir, modifiable architecture-independent data, ${PREFIX}/com +localstatedir, modifiable single-machine data, ${PREFIX}/var +libdir, object code libraries, ${EXEC_PREFIX}/lib +includedir, C header files, ${PREFIX}/include +oldincludedir, C header files for non-gcc, /usr/include +datarootdir, read-only arch.-independent data root, ${PREFIX}/share +datadir, read-only architecture-independent data, ${DATAROOTDIR} +infodir, info documentation, ${DATAROOTDIR}/info +localedir, locale-dependent data, ${DATAROOTDIR}/locale +mandir, man documentation, ${DATAROOTDIR}/man +docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE} +htmldir, html documentation, ${DOCDIR} +dvidir, dvi documentation, ${DOCDIR} +pdfdir, pdf documentation, ${DOCDIR} +psdir, ps documentation, ${DOCDIR} +'''.split('\n')if x] +def configure(conf): + def get_param(varname,default): + return getattr(Options.options,varname,'')or default + env=conf.env + env.LIBDIR=env.BINDIR=[] + env.EXEC_PREFIX=get_param('EXEC_PREFIX',env.PREFIX) + env.PACKAGE=getattr(Context.g_module,'APPNAME',None)or env.PACKAGE + complete=False + iter=0 + while not complete and iter<len(_options)+1: + iter+=1 + complete=True + for name,help,default in _options: + name=name.upper() + if not env[name]: + try: + env[name]=Utils.subst_vars(get_param(name,default).replace('/',os.sep),env) + except TypeError: + complete=False + if not complete: + lst=[name for name,_,_ in _options if not env[name.upper()]] + raise 
conf.errors.WafError('Variable substitution failure %r'%lst) +def options(opt): + inst_dir=opt.add_option_group('Installation directories','By default, "waf install" will put the files in\ + "/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\ + than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"') + for k in('--prefix','--destdir'): + option=opt.parser.get_option(k) + if option: + opt.parser.remove_option(k) + inst_dir.add_option(option) + inst_dir.add_option('--exec-prefix',help='installation prefix [Default: ${PREFIX}]',default='',dest='EXEC_PREFIX') + dirs_options=opt.add_option_group('Pre-defined installation directories','') + for name,help,default in _options: + option_name='--'+name + str_default=default + str_help='%s [Default: %s]'%(help,str_default) + dirs_options.add_option(option_name,help=str_help,default='',dest=name.upper()) diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gxx.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gxx.py new file mode 100644 index 0000000..475f7fd --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/gxx.py @@ -0,0 +1,98 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_gxx(conf): + cxx=conf.find_program(['g++','c++'],var='CXX') + cxx=conf.cmd_to_list(cxx) + conf.get_cc_version(cxx,gcc=True) + conf.env.CXX_NAME='gcc' + conf.env.CXX=cxx +@conf +def gxx_common_flags(conf): + v=conf.env + v['CXX_SRC_F']=[] + v['CXX_TGT_F']=['-c','-o'] + if not v['LINK_CXX']:v['LINK_CXX']=v['CXX'] + v['CXXLNK_SRC_F']=[] + v['CXXLNK_TGT_F']=['-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['RPATH_ST']='-Wl,-rpath,%s' + v['SONAME_ST']='-Wl,-h,%s' + v['SHLIB_MARKER']='-Wl,-Bdynamic' + v['STLIB_MARKER']='-Wl,-Bstatic' + v['cxxprogram_PATTERN']='%s' + v['CXXFLAGS_cxxshlib']=['-fPIC'] + v['LINKFLAGS_cxxshlib']=['-shared'] + v['cxxshlib_PATTERN']='lib%s.so' + v['LINKFLAGS_cxxstlib']=['-Wl,-Bstatic'] + v['cxxstlib_PATTERN']='lib%s.a' + v['LINKFLAGS_MACBUNDLE']=['-bundle','-undefined','dynamic_lookup'] + v['CXXFLAGS_MACBUNDLE']=['-fPIC'] + v['macbundle_PATTERN']='%s.bundle' +@conf +def gxx_modifier_win32(conf): + v=conf.env + v['cxxprogram_PATTERN']='%s.exe' + v['cxxshlib_PATTERN']='%s.dll' + v['implib_PATTERN']='lib%s.dll.a' + v['IMPLIB_ST']='-Wl,--out-implib,%s' + v['CXXFLAGS_cxxshlib']=[] + v.append_value('LINKFLAGS',['-Wl,--enable-auto-import']) +@conf +def gxx_modifier_cygwin(conf): + gxx_modifier_win32(conf) + v=conf.env + v['cxxshlib_PATTERN']='cyg%s.dll' + v.append_value('LINKFLAGS_cxxshlib',['-Wl,--enable-auto-image-base']) + v['CXXFLAGS_cxxshlib']=[] +@conf +def gxx_modifier_darwin(conf): + v=conf.env + v['CXXFLAGS_cxxshlib']=['-fPIC'] + v['LINKFLAGS_cxxshlib']=['-dynamiclib','-Wl,-compatibility_version,1','-Wl,-current_version,1'] + v['cxxshlib_PATTERN']='lib%s.dylib' + v['FRAMEWORKPATH_ST']='-F%s' + v['FRAMEWORK_ST']=['-framework'] + v['ARCH_ST']=['-arch'] + v['LINKFLAGS_cxxstlib']=[] + v['SHLIB_MARKER']=[] + v['STLIB_MARKER']=[] + v['SONAME_ST']=[] +@conf +def gxx_modifier_aix(conf): + v=conf.env + v['LINKFLAGS_cxxprogram']=['-Wl,-brtl'] + v['LINKFLAGS_cxxshlib']=['-shared','-Wl,-brtl,-bexpfull'] + v['SHLIB_MARKER']=[] +@conf +def gxx_modifier_hpux(conf): + v=conf.env 
+ v['SHLIB_MARKER']=[] + v['STLIB_MARKER']='-Bstatic' + v['CFLAGS_cxxshlib']=['-fPIC','-DPIC'] + v['cxxshlib_PATTERN']='lib%s.sl' +@conf +def gxx_modifier_openbsd(conf): + conf.env.SONAME_ST=[] +@conf +def gxx_modifier_platform(conf): + gxx_modifier_func=getattr(conf,'gxx_modifier_'+conf.env.DEST_OS,None) + if gxx_modifier_func: + gxx_modifier_func() +def configure(conf): + conf.find_gxx() + conf.find_ar() + conf.gxx_common_flags() + conf.gxx_modifier_platform() + conf.cxx_load_tools() + conf.cxx_add_flags() + conf.link_add_flags() diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/icc.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/icc.py new file mode 100644 index 0000000..7c75e18 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/icc.py @@ -0,0 +1,30 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys +from waflib.Tools import ccroot,ar,gcc +from waflib.Configure import conf +@conf +def find_icc(conf): + if sys.platform=='cygwin': + conf.fatal('The Intel compiler does not work on Cygwin') + v=conf.env + cc=None + if v['CC']:cc=v['CC'] + elif'CC'in conf.environ:cc=conf.environ['CC'] + if not cc:cc=conf.find_program('icc',var='CC') + if not cc:cc=conf.find_program('ICL',var='CC') + if not cc:conf.fatal('Intel C Compiler (icc) was not found') + cc=conf.cmd_to_list(cc) + conf.get_cc_version(cc,icc=True) + v['CC']=cc + v['CC_NAME']='icc' +def configure(conf): + conf.find_icc() + conf.find_ar() + conf.gcc_common_flags() + conf.gcc_modifier_platform() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/icpc.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/icpc.py new file mode 100644 index 0000000..14a5325 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/icpc.py @@ -0,0 +1,29 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys +from waflib.Tools import ccroot,ar,gxx +from waflib.Configure import conf +@conf +def find_icpc(conf): + if sys.platform=='cygwin': + conf.fatal('The Intel compiler does not work on Cygwin') + v=conf.env + cxx=None + if v['CXX']:cxx=v['CXX'] + elif'CXX'in conf.environ:cxx=conf.environ['CXX'] + if not cxx:cxx=conf.find_program('icpc',var='CXX') + if not cxx:conf.fatal('Intel C++ Compiler (icpc) was not found') + cxx=conf.cmd_to_list(cxx) + conf.get_cc_version(cxx,icc=True) + v['CXX']=cxx + v['CXX_NAME']='icc' +def configure(conf): + conf.find_icpc() + conf.find_ar() + conf.gxx_common_flags() + conf.gxx_modifier_platform() + conf.cxx_load_tools() + conf.cxx_add_flags() + conf.link_add_flags() diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ifort.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ifort.py new file mode 100644 index 0000000..a9f2528 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ifort.py @@ -0,0 +1,49 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import re +from waflib import Utils +from waflib.Tools import fc,fc_config,fc_scan,ar +from waflib.Configure import conf +@conf +def find_ifort(conf): + fc=conf.find_program('ifort',var='FC') + fc=conf.cmd_to_list(fc) + conf.get_ifort_version(fc) + conf.env.FC_NAME='IFORT' +@conf +def ifort_modifier_cygwin(conf): + raise NotImplementedError("Ifort on cygwin not yet implemented") +@conf +def ifort_modifier_win32(conf): + fc_config.fortran_modifier_win32(conf) +@conf +def ifort_modifier_darwin(conf): + fc_config.fortran_modifier_darwin(conf) +@conf +def ifort_modifier_platform(conf): + dest_os=conf.env['DEST_OS']or Utils.unversioned_sys_platform() + ifort_modifier_func=getattr(conf,'ifort_modifier_'+dest_os,None) + if ifort_modifier_func: + ifort_modifier_func() +@conf +def get_ifort_version(conf,fc): + version_re=re.compile(r"ifort\s*\(IFORT\)\s*(?P<major>\d*)\.(?P<minor>\d*)",re.I).search + cmd=fc+['--version'] + out,err=fc_config.getoutput(conf,cmd,stdin=False) + if out: + match=version_re(out) + else: + match=version_re(err) + if not match: + conf.fatal('cannot determine ifort version.') + k=match.groupdict() + conf.env['FC_VERSION']=(k['major'],k['minor']) +def configure(conf): + conf.find_ifort() + conf.find_program('xiar',var='AR') + conf.env.ARFLAGS='rcs' + conf.fc_flags() + conf.fc_add_flags() + conf.ifort_modifier_platform() diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/intltool.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/intltool.py new file mode 100644 index 0000000..d558674 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/intltool.py @@ -0,0 +1,77 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,re +from waflib import Configure,TaskGen,Task,Utils,Runner,Options,Build,Logs +import waflib.Tools.ccroot +from waflib.TaskGen import feature,before_method +from waflib.Logs import error +@before_method('process_source') +@feature('intltool_in') +def apply_intltool_in_f(self): + try:self.meths.remove('process_source') + except ValueError:pass + if not self.env.LOCALEDIR: + self.env.LOCALEDIR=self.env.PREFIX+'/share/locale' + for i in self.to_list(self.source): + node=self.path.find_resource(i) + podir=getattr(self,'podir','po') + podirnode=self.path.find_dir(podir) + if not podirnode: + error("could not find the podir %r"%podir) + continue + cache=getattr(self,'intlcache','.intlcache') + self.env['INTLCACHE']=os.path.join(self.path.bldpath(),podir,cache) + self.env['INTLPODIR']=podirnode.bldpath() + self.env['INTLFLAGS']=getattr(self,'flags',['-q','-u','-c']) + task=self.create_task('intltool',node,node.change_ext('')) + inst=getattr(self,'install_path','${LOCALEDIR}') + if inst: + self.bld.install_files(inst,task.outputs) +@feature('intltool_po') +def apply_intltool_po(self): + try:self.meths.remove('process_source') + except ValueError:pass + if not self.env.LOCALEDIR: + self.env.LOCALEDIR=self.env.PREFIX+'/share/locale' + appname=getattr(self,'appname','set_your_app_name') + podir=getattr(self,'podir','') + inst=getattr(self,'install_path','${LOCALEDIR}') + linguas=self.path.find_node(os.path.join(podir,'LINGUAS')) + if linguas: + file=open(linguas.abspath()) + langs=[] + for line in file.readlines(): + if not line.startswith('#'): + langs+=line.split() + file.close() + re_linguas=re.compile('[-a-zA-Z_@.]+') + for lang in langs: + if re_linguas.match(lang): + node=self.path.find_resource(os.path.join(podir,re_linguas.match(lang).group()+'.po')) + task=self.create_task('po',node,node.change_ext('.mo')) + if inst: + filename=task.outputs[0].name + (langname,ext)=os.path.splitext(filename) + inst_file=inst+os.sep+langname+os.sep+'LC_MESSAGES'+os.sep+appname+'.mo' + self.bld.install_as(inst_file,task.outputs[0],chmod=getattr(self,'chmod',Utils.O644),env=task.env) + else: + Logs.pprint('RED',"Error no LINGUAS file found in po directory") +class po(Task.Task): + run_str='${MSGFMT} -o ${TGT} ${SRC}' + color='BLUE' +class intltool(Task.Task): + run_str='${INTLTOOL} ${INTLFLAGS} ${INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}' + color='BLUE' +def configure(conf): + conf.find_program('msgfmt',var='MSGFMT') + conf.find_perl_program('intltool-merge',var='INTLTOOL') + prefix=conf.env.PREFIX + datadir=conf.env.DATADIR + if not datadir: + datadir=os.path.join(prefix,'share') + conf.define('LOCALEDIR',os.path.join(datadir,'locale').replace('\\','\\\\')) + conf.define('DATADIR',datadir.replace('\\','\\\\')) + if conf.env.CC or conf.env.CXX: + conf.check(header_name='locale.h') diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/irixcc.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/irixcc.py new file mode 100644 index 0000000..8dbdfca --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/irixcc.py @@ -0,0 +1,48 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os +from waflib import Utils +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_irixcc(conf): + v=conf.env + cc=None + if v['CC']:cc=v['CC'] + elif'CC'in conf.environ:cc=conf.environ['CC'] + if not cc:cc=conf.find_program('cc',var='CC') + if not cc:conf.fatal('irixcc was not found') + cc=conf.cmd_to_list(cc) + try: + conf.cmd_and_log(cc+['-version']) + except Exception: + conf.fatal('%r -version could not be executed'%cc) + v['CC']=cc + v['CC_NAME']='irix' +@conf +def irixcc_common_flags(conf): + v=conf.env + v['CC_SRC_F']='' + v['CC_TGT_F']=['-c','-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + if not v['LINK_CC']:v['LINK_CC']=v['CC'] + v['CCLNK_SRC_F']='' + v['CCLNK_TGT_F']=['-o'] + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['cprogram_PATTERN']='%s' + v['cshlib_PATTERN']='lib%s.so' + v['cstlib_PATTERN']='lib%s.a' +def configure(conf): + conf.find_irixcc() + conf.find_cpp() + conf.find_ar() + conf.irixcc_common_flags() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/javaw.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/javaw.py new file mode 100644 index 0000000..b84cdc3 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/javaw.py @@ -0,0 +1,309 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,re,tempfile,shutil +from waflib import TaskGen,Task,Utils,Options,Build,Errors,Node,Logs +from waflib.Configure import conf +from waflib.TaskGen import feature,before_method,after_method +from waflib.Tools import ccroot +ccroot.USELIB_VARS['javac']=set(['CLASSPATH','JAVACFLAGS']) +SOURCE_RE='**/*.java' +JAR_RE='**/*' +class_check_source=''' +public class Test { + public static void main(String[] argv) { + Class lib; + if (argv.length < 1) { + System.err.println("Missing argument"); + System.exit(77); + } + try { + lib = Class.forName(argv[0]); + } catch (ClassNotFoundException e) { + System.err.println("ClassNotFoundException"); + System.exit(1); + } + lib = null; + System.exit(0); + } +} +''' +@feature('javac') +@before_method('process_source') +def apply_java(self): + Utils.def_attrs(self,jarname='',classpath='',sourcepath='.',srcdir='.',jar_mf_attributes={},jar_mf_classpath=[]) + nodes_lst=[] + outdir=getattr(self,'outdir',None) + if outdir: + if not isinstance(outdir,Node.Node): + outdir=self.path.get_bld().make_node(self.outdir) + else: + outdir=self.path.get_bld() + outdir.mkdir() + self.outdir=outdir + self.env['OUTDIR']=outdir.abspath() + self.javac_task=tsk=self.create_task('javac') + tmp=[] + srcdir=getattr(self,'srcdir','') + if isinstance(srcdir,Node.Node): + srcdir=[srcdir] + for x in Utils.to_list(srcdir): + if isinstance(x,Node.Node): + y=x + else: + y=self.path.find_dir(x) + if not y: + self.bld.fatal('Could not find the folder %s from %s'%(x,self.path)) + tmp.append(y) + tsk.srcdir=tmp + if getattr(self,'compat',None): + tsk.env.append_value('JAVACFLAGS',['-source',self.compat]) + if hasattr(self,'sourcepath'): + fold=[isinstance(x,Node.Node)and x or self.path.find_dir(x)for x in self.to_list(self.sourcepath)] + names=os.pathsep.join([x.srcpath()for x in fold]) + else: + names=[x.srcpath()for x in tsk.srcdir] + if names: + 
tsk.env.append_value('JAVACFLAGS',['-sourcepath',names]) +@feature('javac') +@after_method('apply_java') +def use_javac_files(self): + lst=[] + self.uselib=self.to_list(getattr(self,'uselib',[])) + names=self.to_list(getattr(self,'use',[])) + get=self.bld.get_tgen_by_name + for x in names: + try: + y=get(x) + except Exception: + self.uselib.append(x) + else: + y.post() + lst.append(y.jar_task.outputs[0].abspath()) + self.javac_task.set_run_after(y.jar_task) + if lst: + self.env.append_value('CLASSPATH',lst) +@feature('javac') +@after_method('apply_java','propagate_uselib_vars','use_javac_files') +def set_classpath(self): + self.env.append_value('CLASSPATH',getattr(self,'classpath',[])) + for x in self.tasks: + x.env.CLASSPATH=os.pathsep.join(self.env.CLASSPATH)+os.pathsep +@feature('jar') +@after_method('apply_java','use_javac_files') +@before_method('process_source') +def jar_files(self): + destfile=getattr(self,'destfile','test.jar') + jaropts=getattr(self,'jaropts',[]) + manifest=getattr(self,'manifest',None) + basedir=getattr(self,'basedir',None) + if basedir: + if not isinstance(self.basedir,Node.Node): + basedir=self.path.get_bld().make_node(basedir) + else: + basedir=self.path.get_bld() + if not basedir: + self.bld.fatal('Could not find the basedir %r for %r'%(self.basedir,self)) + self.jar_task=tsk=self.create_task('jar_create') + if manifest: + jarcreate=getattr(self,'jarcreate','cfm') + node=self.path.find_node(manifest) + tsk.dep_nodes.append(node) + jaropts.insert(0,node.abspath()) + else: + jarcreate=getattr(self,'jarcreate','cf') + if not isinstance(destfile,Node.Node): + destfile=self.path.find_or_declare(destfile) + if not destfile: + self.bld.fatal('invalid destfile %r for %r'%(destfile,self)) + tsk.set_outputs(destfile) + tsk.basedir=basedir + jaropts.append('-C') + jaropts.append(basedir.bldpath()) + jaropts.append('.') + tsk.env['JAROPTS']=jaropts + tsk.env['JARCREATE']=jarcreate + if getattr(self,'javac_task',None): + tsk.set_run_after(self.javac_task) +@feature('jar') +@after_method('jar_files') +def use_jar_files(self): + lst=[] + self.uselib=self.to_list(getattr(self,'uselib',[])) + names=self.to_list(getattr(self,'use',[])) + get=self.bld.get_tgen_by_name + for x in names: + try: + y=get(x) + except Exception: + self.uselib.append(x) + else: + y.post() + self.jar_task.run_after.update(y.tasks) +class jar_create(Task.Task): + color='GREEN' + run_str='${JAR} ${JARCREATE} ${TGT} ${JAROPTS}' + def runnable_status(self): + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + if not self.inputs: + global JAR_RE + try: + self.inputs=[x for x in self.basedir.ant_glob(JAR_RE,remove=False)if id(x)!=id(self.outputs[0])] + except Exception: + raise Errors.WafError('Could not find the basedir %r for %r'%(self.basedir,self)) + return super(jar_create,self).runnable_status() +class javac(Task.Task): + color='BLUE' + nocache=True + vars=['CLASSPATH','JAVACFLAGS','JAVAC','OUTDIR'] + def runnable_status(self): + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + if not self.inputs: + global SOURCE_RE + self.inputs=[] + for x in self.srcdir: + self.inputs.extend(x.ant_glob(SOURCE_RE,remove=False)) + return super(javac,self).runnable_status() + def run(self): + env=self.env + gen=self.generator + bld=gen.bld + wd=bld.bldnode.abspath() + def to_list(xx): + if isinstance(xx,str):return[xx] + return xx + cmd=[] + cmd.extend(to_list(env['JAVAC'])) + cmd.extend(['-classpath']) + cmd.extend(to_list(env['CLASSPATH'])) + cmd.extend(['-d']) + 
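# The remainder of the javac command is assembled below: the output directory, any JAVACFLAGS,
# and finally the .java sources, which are passed through a temporary @<response-file> whenever
# the combined command line would exceed roughly 8 kB (a Windows command-length workaround).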
cmd.extend(to_list(env['OUTDIR'])) + cmd.extend(to_list(env['JAVACFLAGS'])) + files=[a.path_from(bld.bldnode)for a in self.inputs] + tmp=None + try: + if len(str(files))+len(str(cmd))>8192: + (fd,tmp)=tempfile.mkstemp(dir=bld.bldnode.abspath()) + try: + os.write(fd,'\n'.join(files).encode()) + finally: + if tmp: + os.close(fd) + if Logs.verbose: + Logs.debug('runner: %r'%(cmd+files)) + cmd.append('@'+tmp) + else: + cmd+=files + ret=self.exec_command(cmd,cwd=wd,env=env.env or None) + finally: + if tmp: + os.remove(tmp) + return ret + def post_run(self): + for n in self.generator.outdir.ant_glob('**/*.class'): + n.sig=Utils.h_file(n.abspath()) + self.generator.bld.task_sigs[self.uid()]=self.cache_sig +@feature('javadoc') +@after_method('process_rule') +def create_javadoc(self): + tsk=self.create_task('javadoc') + tsk.classpath=getattr(self,'classpath',[]) + self.javadoc_package=Utils.to_list(self.javadoc_package) + if not isinstance(self.javadoc_output,Node.Node): + self.javadoc_output=self.bld.path.find_or_declare(self.javadoc_output) +class javadoc(Task.Task): + color='BLUE' + def __str__(self): + return'%s: %s -> %s\n'%(self.__class__.__name__,self.generator.srcdir,self.generator.javadoc_output) + def run(self): + env=self.env + bld=self.generator.bld + wd=bld.bldnode.abspath() + srcpath=self.generator.path.abspath()+os.sep+self.generator.srcdir + srcpath+=os.pathsep + srcpath+=self.generator.path.get_bld().abspath()+os.sep+self.generator.srcdir + classpath=env.CLASSPATH + classpath+=os.pathsep + classpath+=os.pathsep.join(self.classpath) + classpath="".join(classpath) + self.last_cmd=lst=[] + lst.extend(Utils.to_list(env['JAVADOC'])) + lst.extend(['-d',self.generator.javadoc_output.abspath()]) + lst.extend(['-sourcepath',srcpath]) + lst.extend(['-classpath',classpath]) + lst.extend(['-subpackages']) + lst.extend(self.generator.javadoc_package) + lst=[x for x in lst if x] + self.generator.bld.cmd_and_log(lst,cwd=wd,env=env.env or None,quiet=0) + def post_run(self): + nodes=self.generator.javadoc_output.ant_glob('**') + for x in nodes: + x.sig=Utils.h_file(x.abspath()) + self.generator.bld.task_sigs[self.uid()]=self.cache_sig +def configure(self): + java_path=self.environ['PATH'].split(os.pathsep) + v=self.env + if'JAVA_HOME'in self.environ: + java_path=[os.path.join(self.environ['JAVA_HOME'],'bin')]+java_path + self.env['JAVA_HOME']=[self.environ['JAVA_HOME']] + for x in'javac java jar javadoc'.split(): + self.find_program(x,var=x.upper(),path_list=java_path) + self.env[x.upper()]=self.cmd_to_list(self.env[x.upper()]) + if'CLASSPATH'in self.environ: + v['CLASSPATH']=self.environ['CLASSPATH'] + if not v['JAR']:self.fatal('jar is required for making java packages') + if not v['JAVAC']:self.fatal('javac is required for compiling java classes') + v['JARCREATE']='cf' + v['JAVACFLAGS']=[] +@conf +def check_java_class(self,classname,with_classpath=None): + javatestdir='.waf-javatest' + classpath=javatestdir + if self.env['CLASSPATH']: + classpath+=os.pathsep+self.env['CLASSPATH'] + if isinstance(with_classpath,str): + classpath+=os.pathsep+with_classpath + shutil.rmtree(javatestdir,True) + os.mkdir(javatestdir) + Utils.writef(os.path.join(javatestdir,'Test.java'),class_check_source) + self.exec_command(self.env['JAVAC']+[os.path.join(javatestdir,'Test.java')],shell=False) + cmd=self.env['JAVA']+['-cp',classpath,'Test',classname] + self.to_log("%s\n"%str(cmd)) + found=self.exec_command(cmd,shell=False) + self.msg('Checking for java class %s'%classname,not found) + shutil.rmtree(javatestdir,True) 
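# exec_command() returns the process exit status, so 'found' is 0 (falsy) when the class was
# resolved on the classpath; hence the inverted 'not found' passed to msg() just above.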
+ return found +@conf +def check_jni_headers(conf): + if not conf.env.CC_NAME and not conf.env.CXX_NAME: + conf.fatal('load a compiler first (gcc, g++, ..)') + if not conf.env.JAVA_HOME: + conf.fatal('set JAVA_HOME in the system environment') + javaHome=conf.env['JAVA_HOME'][0] + dir=conf.root.find_dir(conf.env.JAVA_HOME[0]+'/include') + if dir is None: + dir=conf.root.find_dir(conf.env.JAVA_HOME[0]+'/../Headers') + if dir is None: + conf.fatal('JAVA_HOME does not seem to be set properly') + f=dir.ant_glob('**/(jni|jni_md).h') + incDirs=[x.parent.abspath()for x in f] + dir=conf.root.find_dir(conf.env.JAVA_HOME[0]) + f=dir.ant_glob('**/*jvm.(so|dll|dylib)') + libDirs=[x.parent.abspath()for x in f]or[javaHome] + f=dir.ant_glob('**/*jvm.(lib)') + if f: + libDirs=[[x,y.parent.abspath()]for x in libDirs for y in f] + for d in libDirs: + try: + conf.check(header_name='jni.h',define_name='HAVE_JNI_H',lib='jvm',libpath=d,includes=incDirs,uselib_store='JAVA',uselib='JAVA') + except Exception: + pass + else: + break + else: + conf.fatal('could not find lib jvm in %r (see config.log)'%libDirs) diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/kde4.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/kde4.py new file mode 100644 index 0000000..cd51f5f --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/kde4.py @@ -0,0 +1,48 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys,re +from waflib import Options,TaskGen,Task,Utils +from waflib.TaskGen import feature,after_method +@feature('msgfmt') +def apply_msgfmt(self): + for lang in self.to_list(self.langs): + node=self.path.find_resource(lang+'.po') + task=self.create_task('msgfmt',node,node.change_ext('.mo')) + langname=lang.split('/') + langname=langname[-1] + inst=getattr(self,'install_path','${KDE4_LOCALE_INSTALL_DIR}') + self.bld.install_as(inst+os.sep+langname+os.sep+'LC_MESSAGES'+os.sep+getattr(self,'appname','set_your_appname')+'.mo',task.outputs[0],chmod=getattr(self,'chmod',Utils.O644)) +class msgfmt(Task.Task): + color='BLUE' + run_str='${MSGFMT} ${SRC} -o ${TGT}' +def configure(self): + kdeconfig=self.find_program('kde4-config') + prefix=self.cmd_and_log('%s --prefix'%kdeconfig).strip() + fname='%s/share/apps/cmake/modules/KDELibsDependencies.cmake'%prefix + try:os.stat(fname) + except OSError: + fname='%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake'%prefix + try:os.stat(fname) + except OSError:self.fatal('could not open %s'%fname) + try: + txt=Utils.readf(fname) + except(OSError,IOError): + self.fatal('could not read %s'%fname) + txt=txt.replace('\\\n','\n') + fu=re.compile('#(.*)\n') + txt=fu.sub('',txt) + setregexp=re.compile('([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)') + found=setregexp.findall(txt) + for(_,key,val)in found: + self.env[key]=val + self.env['LIB_KDECORE']=['kdecore'] + self.env['LIB_KDEUI']=['kdeui'] + self.env['LIB_KIO']=['kio'] + self.env['LIB_KHTML']=['khtml'] + self.env['LIB_KPARTS']=['kparts'] + self.env['LIBPATH_KDECORE']=[os.path.join(self.env.KDE4_LIB_INSTALL_DIR,'kde4','devel'),self.env.KDE4_LIB_INSTALL_DIR] + self.env['INCLUDES_KDECORE']=[self.env['KDE4_INCLUDE_INSTALL_DIR']] + self.env.append_value('INCLUDES_KDECORE',[self.env['KDE4_INCLUDE_INSTALL_DIR']+os.sep+'KDE']) + self.find_program('msgfmt',var='MSGFMT') diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ldc2.py 
b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ldc2.py new file mode 100644 index 0000000..25b99e5 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ldc2.py @@ -0,0 +1,37 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import sys +from waflib.Tools import ar,d +from waflib.Configure import conf +@conf +def find_ldc2(conf): + conf.find_program(['ldc2'],var='D') + out=conf.cmd_and_log([conf.env.D,'-version']) + if out.find("based on DMD v2.")==-1: + conf.fatal("detected compiler is not ldc2") +@conf +def common_flags_ldc2(conf): + v=conf.env + v['D_SRC_F']=['-c'] + v['D_TGT_F']='-of%s' + v['D_LINKER']=v['D'] + v['DLNK_SRC_F']='' + v['DLNK_TGT_F']='-of%s' + v['DINC_ST']='-I%s' + v['DSHLIB_MARKER']=v['DSTLIB_MARKER']='' + v['DSTLIB_ST']=v['DSHLIB_ST']='-L-l%s' + v['DSTLIBPATH_ST']=v['DLIBPATH_ST']='-L-L%s' + v['LINKFLAGS_dshlib']=['-L-shared'] + v['DHEADER_ext']='.di' + v['DFLAGS_d_with_header']=['-H','-Hf'] + v['D_HDR_F']='%s' + v['LINKFLAGS']=[] + v['DFLAGS_dshlib']=['-relocation-model=pic'] +def configure(conf): + conf.find_ldc2() + conf.load('ar') + conf.load('d') + conf.common_flags_ldc2() + conf.d_platform_flags() diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/lua.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/lua.py new file mode 100644 index 0000000..a0a35fc --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/lua.py @@ -0,0 +1,18 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib.TaskGen import extension +from waflib import Task,Utils +@extension('.lua') +def add_lua(self,node): + tsk=self.create_task('luac',node,node.change_ext('.luac')) + inst_to=getattr(self,'install_path',self.env.LUADIR and'${LUADIR}'or None) + if inst_to: + self.bld.install_files(inst_to,tsk.outputs) + return tsk +class luac(Task.Task): + run_str='${LUAC} -s -o ${TGT} ${SRC}' + color='PINK' +def configure(conf): + conf.find_program('luac',var='LUAC') diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/msvc.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/msvc.py new file mode 100644 index 0000000..3a7a7f2 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/msvc.py @@ -0,0 +1,749 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys,re,tempfile +from waflib import Utils,Task,Logs,Options,Errors +from waflib.Logs import debug,warn +from waflib.TaskGen import after_method,feature +from waflib.Configure import conf +from waflib.Tools import ccroot,c,cxx,ar,winres +g_msvc_systemlibs=''' +aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet +cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs +credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d +ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp +faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glauxglu32 gpedit gpmuuid +gtrts32w gtrtst32hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop +kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi +mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree +msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm +netapi32 nmapinmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp +odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlgolepro32 opends60 opengl32 +osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu +ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm +rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32 +shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32 +traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg +version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm +wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp +'''.split() +all_msvc_platforms=[('x64','amd64'),('x86','x86'),('ia64','ia64'),('x86_amd64','amd64'),('x86_ia64','ia64'),('x86_arm','arm')] +all_wince_platforms=[('armv4','arm'),('armv4i','arm'),('mipsii','mips'),('mipsii_fp','mips'),('mipsiv','mips'),('mipsiv_fp','mips'),('sh4','sh'),('x86','cex86')] +all_icl_platforms=[('intel64','amd64'),('em64t','amd64'),('ia32','x86'),('Itanium','ia64')] +def options(opt): + opt.add_option('--msvc_version',type='string',help='msvc version, eg: "msvc 10.0,msvc 9.0"',default='') + opt.add_option('--msvc_targets',type='string',help='msvc targets, eg: "x64,arm"',default='') +def setup_msvc(conf,versions,arch=False): + platforms=getattr(Options.options,'msvc_targets','').split(',') + if platforms==['']: + platforms=Utils.to_list(conf.env['MSVC_TARGETS'])or[i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms] + desired_versions=getattr(Options.options,'msvc_version','').split(',') + if desired_versions==['']: + desired_versions=conf.env['MSVC_VERSIONS']or[v for v,_ in versions][::-1] + versiondict=dict(versions) + for version in desired_versions: + try: + targets=dict(versiondict[version]) + for target in platforms: + try: + arch,(p1,p2,p3)=targets[target] + compiler,revision=version.rsplit(' ',1) + if arch: + return compiler,revision,p1,p2,p3,arch + else: + return compiler,revision,p1,p2,p3 + except KeyError:continue + except KeyError:continue + conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)') +@conf +def get_msvc_version(conf,compiler,version,target,vcvars): + debug('msvc: get_msvc_version: %r %r %r',compiler,version,target) + 
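# Detection strategy used below: write a throwaway waf-print-msvc.bat that calls the supplied
# vcvars script for the requested target and echoes PATH/INCLUDE/LIB, then parse those three
# variables back out of the captured output.
# Minimal usage sketch (hypothetical wscript; the version/target values are assumptions):
#   def configure(conf):
#       conf.env.MSVC_VERSIONS = ['msvc 10.0']   # optional filters, same role as the
#       conf.env.MSVC_TARGETS  = ['x64']         # --msvc_version/--msvc_targets options above
#       conf.load('msvc')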
batfile=conf.bldnode.make_node('waf-print-msvc.bat') + batfile.write("""@echo off +set INCLUDE= +set LIB= +call "%s" %s +echo PATH=%%PATH%% +echo INCLUDE=%%INCLUDE%% +echo LIB=%%LIB%%;%%LIBPATH%% +"""%(vcvars,target)) + sout=conf.cmd_and_log(['cmd','/E:on','/V:on','/C',batfile.abspath()]) + lines=sout.splitlines() + if not lines[0]: + lines.pop(0) + MSVC_PATH=MSVC_INCDIR=MSVC_LIBDIR=None + for line in lines: + if line.startswith('PATH='): + path=line[5:] + MSVC_PATH=path.split(';') + elif line.startswith('INCLUDE='): + MSVC_INCDIR=[i for i in line[8:].split(';')if i] + elif line.startswith('LIB='): + MSVC_LIBDIR=[i for i in line[4:].split(';')if i] + if None in(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR): + conf.fatal('msvc: Could not find a valid architecture for building (get_msvc_version_3)') + env=dict(os.environ) + env.update(PATH=path) + compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler) + cxx=conf.find_program(compiler_name,path_list=MSVC_PATH) + cxx=conf.cmd_to_list(cxx) + if'CL'in env: + del(env['CL']) + try: + try: + conf.cmd_and_log(cxx+['/help'],env=env) + except Exception as e: + debug('msvc: get_msvc_version: %r %r %r -> failure'%(compiler,version,target)) + debug(str(e)) + conf.fatal('msvc: cannot run the compiler (in get_msvc_version)') + else: + debug('msvc: get_msvc_version: %r %r %r -> OK',compiler,version,target) + finally: + conf.env[compiler_name]='' + return(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR) +@conf +def gather_wsdk_versions(conf,versions): + version_pattern=re.compile('^v..?.?\...?.?') + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows') + except WindowsError: + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows') + except WindowsError: + return + index=0 + while 1: + try: + version=Utils.winreg.EnumKey(all_versions,index) + except WindowsError: + break + index=index+1 + if not version_pattern.match(version): + continue + try: + msvc_version=Utils.winreg.OpenKey(all_versions,version) + path,type=Utils.winreg.QueryValueEx(msvc_version,'InstallationFolder') + except WindowsError: + continue + if os.path.isfile(os.path.join(path,'bin','SetEnv.cmd')): + targets=[] + for target,arch in all_msvc_platforms: + try: + targets.append((target,(arch,conf.get_msvc_version('wsdk',version,'/'+target,os.path.join(path,'bin','SetEnv.cmd'))))) + except conf.errors.ConfigurationError: + pass + versions.append(('wsdk '+version[1:],targets)) +def gather_wince_supported_platforms(): + supported_wince_platforms=[] + try: + ce_sdk=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs') + except WindowsError: + try: + ce_sdk=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs') + except WindowsError: + ce_sdk='' + if not ce_sdk: + return supported_wince_platforms + ce_index=0 + while 1: + try: + sdk_device=Utils.winreg.EnumKey(ce_sdk,ce_index) + except WindowsError: + break + ce_index=ce_index+1 + sdk=Utils.winreg.OpenKey(ce_sdk,sdk_device) + try: + path,type=Utils.winreg.QueryValueEx(sdk,'SDKRootDir') + except WindowsError: + try: + path,type=Utils.winreg.QueryValueEx(sdk,'SDKInformation') + path,xml=os.path.split(path) + except WindowsError: + continue + path=str(path) + path,device=os.path.split(path) + if not device: + path,device=os.path.split(path) + for arch,compiler in all_wince_platforms: + platforms=[] + if 
os.path.isdir(os.path.join(path,device,'Lib',arch)): + platforms.append((arch,compiler,os.path.join(path,device,'Include',arch),os.path.join(path,device,'Lib',arch))) + if platforms: + supported_wince_platforms.append((device,platforms)) + return supported_wince_platforms +def gather_msvc_detected_versions(): + version_pattern=re.compile('^(\d\d?\.\d\d?)(Exp)?$') + detected_versions=[] + for vcver,vcvar in[('VCExpress','Exp'),('VisualStudio','')]: + try: + prefix='SOFTWARE\\Wow6432node\\Microsoft\\'+vcver + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,prefix) + except WindowsError: + try: + prefix='SOFTWARE\\Microsoft\\'+vcver + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,prefix) + except WindowsError: + continue + index=0 + while 1: + try: + version=Utils.winreg.EnumKey(all_versions,index) + except WindowsError: + break + index=index+1 + match=version_pattern.match(version) + if not match: + continue + else: + versionnumber=float(match.group(1)) + detected_versions.append((versionnumber,version+vcvar,prefix+"\\"+version)) + def fun(tup): + return tup[0] + detected_versions.sort(key=fun) + return detected_versions +@conf +def gather_msvc_targets(conf,versions,version,vc_path): + targets=[] + if os.path.isfile(os.path.join(vc_path,'vcvarsall.bat')): + for target,realtarget in all_msvc_platforms[::-1]: + try: + targets.append((target,(realtarget,conf.get_msvc_version('msvc',version,target,os.path.join(vc_path,'vcvarsall.bat'))))) + except conf.errors.ConfigurationError: + pass + elif os.path.isfile(os.path.join(vc_path,'Common7','Tools','vsvars32.bat')): + try: + targets.append(('x86',('x86',conf.get_msvc_version('msvc',version,'x86',os.path.join(vc_path,'Common7','Tools','vsvars32.bat'))))) + except conf.errors.ConfigurationError: + pass + elif os.path.isfile(os.path.join(vc_path,'Bin','vcvars32.bat')): + try: + targets.append(('x86',('x86',conf.get_msvc_version('msvc',version,'',os.path.join(vc_path,'Bin','vcvars32.bat'))))) + except conf.errors.ConfigurationError: + pass + if targets: + versions.append(('msvc '+version,targets)) +@conf +def gather_wince_targets(conf,versions,version,vc_path,vsvars,supported_platforms): + for device,platforms in supported_platforms: + cetargets=[] + for platform,compiler,include,lib in platforms: + winCEpath=os.path.join(vc_path,'ce') + if not os.path.isdir(winCEpath): + continue + try: + common_bindirs,_1,_2=conf.get_msvc_version('msvc',version,'x86',vsvars) + except conf.errors.ConfigurationError: + continue + if os.path.isdir(os.path.join(winCEpath,'lib',platform)): + bindirs=[os.path.join(winCEpath,'bin',compiler),os.path.join(winCEpath,'bin','x86_'+compiler)]+common_bindirs + incdirs=[os.path.join(winCEpath,'include'),os.path.join(winCEpath,'atlmfc','include'),include] + libdirs=[os.path.join(winCEpath,'lib',platform),os.path.join(winCEpath,'atlmfc','lib',platform),lib] + cetargets.append((platform,(platform,(bindirs,incdirs,libdirs)))) + if cetargets: + versions.append((device+' '+version,cetargets)) +@conf +def gather_winphone_targets(conf,versions,version,vc_path,vsvars): + targets=[] + for target,realtarget in all_msvc_platforms[::-1]: + try: + targets.append((target,(realtarget,conf.get_msvc_version('winphone',version,target,vsvars)))) + except conf.errors.ConfigurationError as e: + pass + if targets: + versions.append(('winphone '+version,targets)) +@conf +def gather_msvc_versions(conf,versions): + vc_paths=[] + for(v,version,reg)in gather_msvc_detected_versions(): + try: + try: + 
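# Read the Visual C++ ProductDir from the registry below, trying the newer Setup\VC key first
# and falling back to the older Setup\Microsoft Visual C++ key.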
msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\VC") + except WindowsError: + msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\Microsoft Visual C++") + path,type=Utils.winreg.QueryValueEx(msvc_version,'ProductDir') + vc_paths.append((version,os.path.abspath(str(path)))) + except WindowsError: + continue + wince_supported_platforms=gather_wince_supported_platforms() + for version,vc_path in vc_paths: + vs_path=os.path.dirname(vc_path) + vsvars=os.path.join(vs_path,'Common7','Tools','vsvars32.bat') + if wince_supported_platforms and os.path.isfile(vsvars): + conf.gather_wince_targets(versions,version,vc_path,vsvars,wince_supported_platforms) + vsvars=os.path.join(vs_path,'VC','WPSDK','WP80','vcvarsphoneall.bat') + if os.path.isfile(vsvars): + conf.gather_winphone_targets(versions,'8.0',vc_path,vsvars) + for version,vc_path in vc_paths: + vs_path=os.path.dirname(vc_path) + conf.gather_msvc_targets(versions,version,vc_path) +@conf +def gather_icl_versions(conf,versions): + version_pattern=re.compile('^...?.?\....?.?') + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++') + except WindowsError: + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Compilers\\C++') + except WindowsError: + return + index=0 + while 1: + try: + version=Utils.winreg.EnumKey(all_versions,index) + except WindowsError: + break + index=index+1 + if not version_pattern.match(version): + continue + targets=[] + for target,arch in all_icl_platforms: + try: + if target=='intel64':targetDir='EM64T_NATIVE' + else:targetDir=target + Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir) + icl_version=Utils.winreg.OpenKey(all_versions,version) + path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir') + batch_file=os.path.join(path,'bin','iclvars.bat') + if os.path.isfile(batch_file): + try: + targets.append((target,(arch,conf.get_msvc_version('intel',version,target,batch_file)))) + except conf.errors.ConfigurationError: + pass + except WindowsError: + pass + for target,arch in all_icl_platforms: + try: + icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+target) + path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir') + batch_file=os.path.join(path,'bin','iclvars.bat') + if os.path.isfile(batch_file): + try: + targets.append((target,(arch,conf.get_msvc_version('intel',version,target,batch_file)))) + except conf.errors.ConfigurationError: + pass + except WindowsError: + continue + major=version[0:2] + versions.append(('intel '+major,targets)) +@conf +def gather_intel_composer_versions(conf,versions): + version_pattern=re.compile('^...?.?\...?.?.?') + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Suites') + except WindowsError: + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Suites') + except WindowsError: + return + index=0 + while 1: + try: + version=Utils.winreg.EnumKey(all_versions,index) + except WindowsError: + break + index=index+1 + if not version_pattern.match(version): + continue + targets=[] + for target,arch in all_icl_platforms: + try: + if target=='intel64':targetDir='EM64T_NATIVE' + else:targetDir=target + try: + defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\'+targetDir) + except WindowsError: + if targetDir=='EM64T_NATIVE': + 
defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\EM64T') + else: + raise WindowsError + uid,type=Utils.winreg.QueryValueEx(defaults,'SubKey') + Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++\\'+targetDir) + icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++') + path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir') + batch_file=os.path.join(path,'bin','iclvars.bat') + if os.path.isfile(batch_file): + try: + targets.append((target,(arch,conf.get_msvc_version('intel',version,target,batch_file)))) + except conf.errors.ConfigurationError as e: + pass + compilervars_warning_attr='_compilervars_warning_key' + if version[0:2]=='13'and getattr(conf,compilervars_warning_attr,True): + setattr(conf,compilervars_warning_attr,False) + patch_url='http://software.intel.com/en-us/forums/topic/328487' + compilervars_arch=os.path.join(path,'bin','compilervars_arch.bat') + for vscomntool in['VS110COMNTOOLS','VS100COMNTOOLS']: + if vscomntool in os.environ: + vs_express_path=os.environ[vscomntool]+r'..\IDE\VSWinExpress.exe' + dev_env_path=os.environ[vscomntool]+r'..\IDE\devenv.exe' + if(r'if exist "%VS110COMNTOOLS%..\IDE\VSWinExpress.exe"'in Utils.readf(compilervars_arch)and not os.path.exists(vs_express_path)and not os.path.exists(dev_env_path)): + Logs.warn(('The Intel compilervar_arch.bat only checks for one Visual Studio SKU ''(VSWinExpress.exe) but it does not seem to be installed at %r. ''The intel command line set up will fail to configure unless the file %r''is patched. See: %s')%(vs_express_path,compilervars_arch,patch_url)) + except WindowsError: + pass + major=version[0:2] + versions.append(('intel '+major,targets)) +@conf +def get_msvc_versions(conf): + if not conf.env['MSVC_INSTALLED_VERSIONS']: + lst=[] + conf.gather_icl_versions(lst) + conf.gather_intel_composer_versions(lst) + conf.gather_wsdk_versions(lst) + conf.gather_msvc_versions(lst) + conf.env['MSVC_INSTALLED_VERSIONS']=lst + return conf.env['MSVC_INSTALLED_VERSIONS'] +@conf +def print_all_msvc_detected(conf): + for version,targets in conf.env['MSVC_INSTALLED_VERSIONS']: + Logs.info(version) + for target,l in targets: + Logs.info("\t"+target) +@conf +def detect_msvc(conf,arch=False): + versions=get_msvc_versions(conf) + return setup_msvc(conf,versions,arch) +@conf +def find_lt_names_msvc(self,libname,is_static=False): + lt_names=['lib%s.la'%libname,'%s.la'%libname,] + for path in self.env['LIBPATH']: + for la in lt_names: + laf=os.path.join(path,la) + dll=None + if os.path.exists(laf): + ltdict=Utils.read_la_file(laf) + lt_libdir=None + if ltdict.get('libdir',''): + lt_libdir=ltdict['libdir'] + if not is_static and ltdict.get('library_names',''): + dllnames=ltdict['library_names'].split() + dll=dllnames[0].lower() + dll=re.sub('\.dll$','',dll) + return(lt_libdir,dll,False) + elif ltdict.get('old_library',''): + olib=ltdict['old_library'] + if os.path.exists(os.path.join(path,olib)): + return(path,olib,True) + elif lt_libdir!=''and os.path.exists(os.path.join(lt_libdir,olib)): + return(lt_libdir,olib,True) + else: + return(None,olib,True) + else: + raise self.errors.WafError('invalid libtool object file: %s'%laf) + return(None,None,None) +@conf +def libname_msvc(self,libname,is_static=False): + lib=libname.lower() + lib=re.sub('\.lib$','',lib) + if lib in g_msvc_systemlibs: + return lib + lib=re.sub('^lib','',lib) + if lib=='m': + return None + (lt_path,lt_libname,lt_static)=self.find_lt_names_msvc(lib,is_static) + if lt_path!=None and lt_libname!=None: + if lt_static==True: + 
return os.path.join(lt_path,lt_libname) + if lt_path!=None: + _libpaths=[lt_path]+self.env['LIBPATH'] + else: + _libpaths=self.env['LIBPATH'] + static_libs=['lib%ss.lib'%lib,'lib%s.lib'%lib,'%ss.lib'%lib,'%s.lib'%lib,] + dynamic_libs=['lib%s.dll.lib'%lib,'lib%s.dll.a'%lib,'%s.dll.lib'%lib,'%s.dll.a'%lib,'lib%s_d.lib'%lib,'%s_d.lib'%lib,'%s.lib'%lib,] + libnames=static_libs + if not is_static: + libnames=dynamic_libs+static_libs + for path in _libpaths: + for libn in libnames: + if os.path.exists(os.path.join(path,libn)): + debug('msvc: lib found: %s'%os.path.join(path,libn)) + return re.sub('\.lib$','',libn) + self.fatal("The library %r could not be found"%libname) + return re.sub('\.lib$','',libname) +@conf +def check_lib_msvc(self,libname,is_static=False,uselib_store=None): + libn=self.libname_msvc(libname,is_static) + if not uselib_store: + uselib_store=libname.upper() + if False and is_static: + self.env['STLIB_'+uselib_store]=[libn] + else: + self.env['LIB_'+uselib_store]=[libn] +@conf +def check_libs_msvc(self,libnames,is_static=False): + for libname in Utils.to_list(libnames): + self.check_lib_msvc(libname,is_static) +def configure(conf): + conf.autodetect(True) + conf.find_msvc() + conf.msvc_common_flags() + conf.cc_load_tools() + conf.cxx_load_tools() + conf.cc_add_flags() + conf.cxx_add_flags() + conf.link_add_flags() + conf.visual_studio_add_flags() +@conf +def no_autodetect(conf): + conf.env.NO_MSVC_DETECT=1 + configure(conf) +@conf +def autodetect(conf,arch=False): + v=conf.env + if v.NO_MSVC_DETECT: + return + if arch: + compiler,version,path,includes,libdirs,arch=conf.detect_msvc(True) + v['DEST_CPU']=arch + else: + compiler,version,path,includes,libdirs=conf.detect_msvc() + v['PATH']=path + v['INCLUDES']=includes + v['LIBPATH']=libdirs + v['MSVC_COMPILER']=compiler + try: + v['MSVC_VERSION']=float(version) + except Exception: + v['MSVC_VERSION']=float(version[:-3]) +def _get_prog_names(conf,compiler): + if compiler=='intel': + compiler_name='ICL' + linker_name='XILINK' + lib_name='XILIB' + else: + compiler_name='CL' + linker_name='LINK' + lib_name='LIB' + return compiler_name,linker_name,lib_name +@conf +def find_msvc(conf): + if sys.platform=='cygwin': + conf.fatal('MSVC module does not work under cygwin Python!') + v=conf.env + path=v['PATH'] + compiler=v['MSVC_COMPILER'] + version=v['MSVC_VERSION'] + compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler) + v.MSVC_MANIFEST=(compiler=='msvc'and version>=8)or(compiler=='wsdk'and version>=6)or(compiler=='intel'and version>=11) + cxx=None + if v['CXX']:cxx=v['CXX'] + elif'CXX'in conf.environ:cxx=conf.environ['CXX'] + cxx=conf.find_program(compiler_name,var='CXX',path_list=path) + cxx=conf.cmd_to_list(cxx) + env=dict(conf.environ) + if path:env.update(PATH=';'.join(path)) + if not conf.cmd_and_log(cxx+['/nologo','/help'],env=env): + conf.fatal('the msvc compiler could not be identified') + v['CC']=v['CXX']=cxx + v['CC_NAME']=v['CXX_NAME']='msvc' + if not v['LINK_CXX']: + link=conf.find_program(linker_name,path_list=path) + if link:v['LINK_CXX']=link + else:conf.fatal('%s was not found (linker)'%linker_name) + v['LINK']=link + if not v['LINK_CC']: + v['LINK_CC']=v['LINK_CXX'] + if not v['AR']: + stliblink=conf.find_program(lib_name,path_list=path,var='AR') + if not stliblink:return + v['ARFLAGS']=['/NOLOGO'] + if v.MSVC_MANIFEST: + conf.find_program('MT',path_list=path,var='MT') + v['MTFLAGS']=['/NOLOGO'] + try: + conf.load('winres') + except Errors.WafError: + warn('Resource compiler not found. 
Compiling resource file is disabled') +@conf +def visual_studio_add_flags(self): + v=self.env + try:v.prepend_value('INCLUDES',[x for x in self.environ['INCLUDE'].split(';')if x]) + except Exception:pass + try:v.prepend_value('LIBPATH',[x for x in self.environ['LIB'].split(';')if x]) + except Exception:pass +@conf +def msvc_common_flags(conf): + v=conf.env + v['DEST_BINFMT']='pe' + v.append_value('CFLAGS',['/nologo']) + v.append_value('CXXFLAGS',['/nologo']) + v['DEFINES_ST']='/D%s' + v['CC_SRC_F']='' + v['CC_TGT_F']=['/c','/Fo'] + v['CXX_SRC_F']='' + v['CXX_TGT_F']=['/c','/Fo'] + if(v.MSVC_COMPILER=='msvc'and v.MSVC_VERSION>=8)or(v.MSVC_COMPILER=='wsdk'and v.MSVC_VERSION>=6): + v['CC_TGT_F']=['/FC']+v['CC_TGT_F'] + v['CXX_TGT_F']=['/FC']+v['CXX_TGT_F'] + v['CPPPATH_ST']='/I%s' + v['AR_TGT_F']=v['CCLNK_TGT_F']=v['CXXLNK_TGT_F']='/OUT:' + v['CFLAGS_CONSOLE']=v['CXXFLAGS_CONSOLE']=['/SUBSYSTEM:CONSOLE'] + v['CFLAGS_NATIVE']=v['CXXFLAGS_NATIVE']=['/SUBSYSTEM:NATIVE'] + v['CFLAGS_POSIX']=v['CXXFLAGS_POSIX']=['/SUBSYSTEM:POSIX'] + v['CFLAGS_WINDOWS']=v['CXXFLAGS_WINDOWS']=['/SUBSYSTEM:WINDOWS'] + v['CFLAGS_WINDOWSCE']=v['CXXFLAGS_WINDOWSCE']=['/SUBSYSTEM:WINDOWSCE'] + v['CFLAGS_CRT_MULTITHREADED']=v['CXXFLAGS_CRT_MULTITHREADED']=['/MT'] + v['CFLAGS_CRT_MULTITHREADED_DLL']=v['CXXFLAGS_CRT_MULTITHREADED_DLL']=['/MD'] + v['CFLAGS_CRT_MULTITHREADED_DBG']=v['CXXFLAGS_CRT_MULTITHREADED_DBG']=['/MTd'] + v['CFLAGS_CRT_MULTITHREADED_DLL_DBG']=v['CXXFLAGS_CRT_MULTITHREADED_DLL_DBG']=['/MDd'] + v['LIB_ST']='%s.lib' + v['LIBPATH_ST']='/LIBPATH:%s' + v['STLIB_ST']='%s.lib' + v['STLIBPATH_ST']='/LIBPATH:%s' + v.append_value('LINKFLAGS',['/NOLOGO']) + if v['MSVC_MANIFEST']: + v.append_value('LINKFLAGS',['/MANIFEST']) + v['CFLAGS_cshlib']=[] + v['CXXFLAGS_cxxshlib']=[] + v['LINKFLAGS_cshlib']=v['LINKFLAGS_cxxshlib']=['/DLL'] + v['cshlib_PATTERN']=v['cxxshlib_PATTERN']='%s.dll' + v['implib_PATTERN']='%s.lib' + v['IMPLIB_ST']='/IMPLIB:%s' + v['LINKFLAGS_cstlib']=[] + v['cstlib_PATTERN']=v['cxxstlib_PATTERN']='%s.lib' + v['cprogram_PATTERN']=v['cxxprogram_PATTERN']='%s.exe' +@after_method('apply_link') +@feature('c','cxx') +def apply_flags_msvc(self): + if self.env.CC_NAME!='msvc'or not getattr(self,'link_task',None): + return + is_static=isinstance(self.link_task,ccroot.stlink_task) + subsystem=getattr(self,'subsystem','') + if subsystem: + subsystem='/subsystem:%s'%subsystem + flags=is_static and'ARFLAGS'or'LINKFLAGS' + self.env.append_value(flags,subsystem) + if not is_static: + for f in self.env.LINKFLAGS: + d=f.lower() + if d[1:]=='debug': + pdbnode=self.link_task.outputs[0].change_ext('.pdb') + self.link_task.outputs.append(pdbnode) + try: + self.install_task.source.append(pdbnode) + except AttributeError: + pass + break +@feature('cprogram','cshlib','cxxprogram','cxxshlib') +@after_method('apply_link') +def apply_manifest(self): + if self.env.CC_NAME=='msvc'and self.env.MSVC_MANIFEST and getattr(self,'link_task',None): + out_node=self.link_task.outputs[0] + man_node=out_node.parent.find_or_declare(out_node.name+'.manifest') + self.link_task.outputs.append(man_node) + self.link_task.do_manifest=True +def exec_mf(self): + env=self.env + mtool=env['MT'] + if not mtool: + return 0 + self.do_manifest=False + outfile=self.outputs[0].abspath() + manifest=None + for out_node in self.outputs: + if out_node.name.endswith('.manifest'): + manifest=out_node.abspath() + break + if manifest is None: + return 0 + mode='' + if'cprogram'in self.generator.features or'cxxprogram'in self.generator.features: + mode='1' + 
elif'cshlib'in self.generator.features or'cxxshlib'in self.generator.features: + mode='2' + debug('msvc: embedding manifest in mode %r'%mode) + lst=[] + lst.append(env['MT']) + lst.extend(Utils.to_list(env['MTFLAGS'])) + lst.extend(['-manifest',manifest]) + lst.append('-outputresource:%s;%s'%(outfile,mode)) + lst=[lst] + return self.exec_command(*lst) +def quote_response_command(self,flag): + if flag.find(' ')>-1: + for x in('/LIBPATH:','/IMPLIB:','/OUT:','/I'): + if flag.startswith(x): + flag='%s"%s"'%(x,flag[len(x):]) + break + else: + flag='"%s"'%flag + return flag +def exec_response_command(self,cmd,**kw): + try: + tmp=None + if sys.platform.startswith('win')and isinstance(cmd,list)and len(' '.join(cmd))>=8192: + program=cmd[0] + cmd=[self.quote_response_command(x)for x in cmd] + (fd,tmp)=tempfile.mkstemp() + os.write(fd,'\r\n'.join(i.replace('\\','\\\\')for i in cmd[1:]).encode()) + os.close(fd) + cmd=[program,'@'+tmp] + ret=self.generator.bld.exec_command(cmd,**kw) + finally: + if tmp: + try: + os.remove(tmp) + except OSError: + pass + return ret +def exec_command_msvc(self,*k,**kw): + if isinstance(k[0],list): + lst=[] + carry='' + for a in k[0]: + if a=='/Fo'or a=='/doc'or a[-1]==':': + carry=a + else: + lst.append(carry+a) + carry='' + k=[lst] + if self.env['PATH']: + env=dict(self.env.env or os.environ) + env.update(PATH=';'.join(self.env['PATH'])) + kw['env']=env + bld=self.generator.bld + try: + if not kw.get('cwd',None): + kw['cwd']=bld.cwd + except AttributeError: + bld.cwd=kw['cwd']=bld.variant_dir + ret=self.exec_response_command(k[0],**kw) + if not ret and getattr(self,'do_manifest',None): + ret=self.exec_mf() + return ret +def wrap_class(class_name): + cls=Task.classes.get(class_name,None) + if not cls: + return None + derived_class=type(class_name,(cls,),{}) + def exec_command(self,*k,**kw): + if self.env['CC_NAME']=='msvc': + return self.exec_command_msvc(*k,**kw) + else: + return super(derived_class,self).exec_command(*k,**kw) + derived_class.exec_command=exec_command + derived_class.exec_response_command=exec_response_command + derived_class.quote_response_command=quote_response_command + derived_class.exec_command_msvc=exec_command_msvc + derived_class.exec_mf=exec_mf + return derived_class +for k in'c cxx cprogram cxxprogram cshlib cxxshlib cstlib cxxstlib'.split(): + wrap_class(k) +def make_winapp(self,family): + append=self.env.append_unique + append('DEFINES','WINAPI_FAMILY=%s'%family) + append('CXXFLAGS','/ZW') + append('CXXFLAGS','/TP') + for lib_path in self.env.LIBPATH: + append('CXXFLAGS','/AI%s'%lib_path) +@feature('winphoneapp') +@after_method('process_use') +@after_method('propagate_uselib_vars') +def make_winphone_app(self): + make_winapp(self,'WINAPI_FAMILY_PHONE_APP') + conf.env.append_unique('LINKFLAGS','/NODEFAULTLIB:ole32.lib') + conf.env.append_unique('LINKFLAGS','PhoneAppModelHost.lib') +@feature('winapp') +@after_method('process_use') +@after_method('propagate_uselib_vars') +def make_windows_app(self): + make_winapp(self,'WINAPI_FAMILY_DESKTOP_APP') diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/nasm.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/nasm.py new file mode 100644 index 0000000..00e650a --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/nasm.py @@ -0,0 +1,16 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os +import waflib.Tools.asm +from waflib.TaskGen import feature +@feature('asm') +def apply_nasm_vars(self): + self.env.append_value('ASFLAGS',self.to_list(getattr(self,'nasm_flags',[]))) +def configure(conf): + nasm=conf.find_program(['nasm','yasm'],var='AS') + conf.env.AS_TGT_F=['-o'] + conf.env.ASLNK_TGT_F=['-o'] + conf.load('asm') + conf.env.ASMPATH_ST='-I%s'+os.sep diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/perl.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/perl.py new file mode 100644 index 0000000..8b6c2f8 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/perl.py @@ -0,0 +1,80 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os +from waflib import Task,Options,Utils +from waflib.Configure import conf +from waflib.TaskGen import extension,feature,before_method +@before_method('apply_incpaths','apply_link','propagate_uselib_vars') +@feature('perlext') +def init_perlext(self): + self.uselib=self.to_list(getattr(self,'uselib',[])) + if not'PERLEXT'in self.uselib:self.uselib.append('PERLEXT') + self.env['cshlib_PATTERN']=self.env['cxxshlib_PATTERN']=self.env['perlext_PATTERN'] +@extension('.xs') +def xsubpp_file(self,node): + outnode=node.change_ext('.c') + self.create_task('xsubpp',node,outnode) + self.source.append(outnode) +class xsubpp(Task.Task): + run_str='${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}' + color='BLUE' + ext_out=['.h'] +@conf +def check_perl_version(self,minver=None): + res=True + if minver: + cver='.'.join(map(str,minver)) + else: + cver='' + self.start_msg('Checking for minimum perl version %s'%cver) + perl=getattr(Options.options,'perlbinary',None) + if not perl: + perl=self.find_program('perl',var='PERL') + if not perl: + self.end_msg("Perl not found",color="YELLOW") + return False + self.env['PERL']=perl + version=self.cmd_and_log([perl,"-e",'printf \"%vd\", $^V']) + if not version: + res=False + version="Unknown" + elif not minver is None: + ver=tuple(map(int,version.split("."))) + if ver<minver: + res=False + self.end_msg(version,color=res and"GREEN"or"YELLOW") + return res +@conf +def check_perl_module(self,module): + cmd=[self.env['PERL'],'-e','use %s'%module] + self.start_msg('perl module %s'%module) + try: + r=self.cmd_and_log(cmd) + except Exception: + self.end_msg(False) + return None + self.end_msg(r or True) + return r +@conf +def check_perl_ext_devel(self): + env=self.env + perl=env.PERL + if not perl: + self.fatal('find perl first') + def read_out(cmd): + return Utils.to_list(self.cmd_and_log(perl+cmd)) + env['LINKFLAGS_PERLEXT']=read_out(" -MConfig -e'print $Config{lddlflags}'") + env['INCLUDES_PERLEXT']=read_out(" -MConfig -e'print \"$Config{archlib}/CORE\"'") + env['CFLAGS_PERLEXT']=read_out(" -MConfig -e'print \"$Config{ccflags} $Config{cccdlflags}\"'") + env['XSUBPP']=read_out(" -MConfig -e'print \"$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}\"'") + env['EXTUTILS_TYPEMAP']=read_out(" -MConfig -e'print \"$Config{privlib}/ExtUtils/typemap\"'") + if not getattr(Options.options,'perlarchdir',None): + env['ARCHDIR_PERL']=self.cmd_and_log(perl+" -MConfig -e'print $Config{sitearch}'") + else: + env['ARCHDIR_PERL']=getattr(Options.options,'perlarchdir') + env['perlext_PATTERN']='%s.'+self.cmd_and_log(perl+" -MConfig -e'print $Config{dlext}'") +def 
options(opt): + opt.add_option('--with-perl-binary',type='string',dest='perlbinary',help='Specify alternate perl binary',default=None) + opt.add_option('--with-perl-archdir',type='string',dest='perlarchdir',help='Specify directory where to install arch specific files',default=None) diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/python.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/python.py new file mode 100644 index 0000000..713f816 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/python.py @@ -0,0 +1,345 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys +from waflib import Utils,Options,Errors,Logs +from waflib.TaskGen import extension,before_method,after_method,feature +from waflib.Configure import conf +FRAG=''' +#include <Python.h> +#ifdef __cplusplus +extern "C" { +#endif + void Py_Initialize(void); + void Py_Finalize(void); +#ifdef __cplusplus +} +#endif +int main(int argc, char **argv) +{ + (void)argc; (void)argv; + Py_Initialize(); + Py_Finalize(); + return 0; +} +''' +INST=''' +import sys, py_compile +py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3]) +''' +DISTUTILS_IMP=['from distutils.sysconfig import get_config_var, get_python_lib'] +@extension('.py') +def process_py(self,node): + try: + if not self.bld.is_install: + return + except AttributeError: + return + try: + if not self.install_path: + return + except AttributeError: + self.install_path='${PYTHONDIR}' + def inst_py(ctx): + install_from=getattr(self,'install_from',None) + if install_from: + install_from=self.path.find_dir(install_from) + install_pyfile(self,node,install_from) + self.bld.add_post_fun(inst_py) +def install_pyfile(self,node,install_from=None): + from_node=install_from or node.parent + tsk=self.bld.install_as(self.install_path+'/'+node.path_from(from_node),node,postpone=False) + path=tsk.get_install_path() + if self.bld.is_install<0: + Logs.info("+ removing byte compiled python files") + for x in'co': + try: + os.remove(path+x) + except OSError: + pass + if self.bld.is_install>0: + try: + st1=os.stat(path) + except OSError: + Logs.error('The python file is missing, this should not happen') + for x in['c','o']: + do_inst=self.env['PY'+x.upper()] + try: + st2=os.stat(path+x) + except OSError: + pass + else: + if st1.st_mtime<=st2.st_mtime: + do_inst=False + if do_inst: + lst=(x=='o')and[self.env['PYFLAGS_OPT']]or[] + (a,b,c)=(path,path+x,tsk.get_install_path(destdir=False)+x) + argv=self.env['PYTHON']+lst+['-c',INST,a,b,c] + Logs.info('+ byte compiling %r'%(path+x)) + env=self.env.env or None + ret=Utils.subprocess.Popen(argv,env=env).wait() + if ret: + raise Errors.WafError('py%s compilation failed %r'%(x,path)) +@feature('py') +def feature_py(self): + pass +@feature('pyext') +@before_method('propagate_uselib_vars','apply_link') +@after_method('apply_bundle') +def init_pyext(self): + self.uselib=self.to_list(getattr(self,'uselib',[])) + if not'PYEXT'in self.uselib: + self.uselib.append('PYEXT') + self.env.cshlib_PATTERN=self.env.cxxshlib_PATTERN=self.env.macbundle_PATTERN=self.env.pyext_PATTERN + self.env.fcshlib_PATTERN=self.env.dshlib_PATTERN=self.env.pyext_PATTERN + try: + if not self.install_path: + return + except AttributeError: + self.install_path='${PYTHONARCHDIR}' +@feature('pyext') +@before_method('apply_link','apply_bundle') +def set_bundle(self): + if Utils.unversioned_sys_platform()=='darwin': + 
self.mac_bundle=True +@before_method('propagate_uselib_vars') +@feature('pyembed') +def init_pyembed(self): + self.uselib=self.to_list(getattr(self,'uselib',[])) + if not'PYEMBED'in self.uselib: + self.uselib.append('PYEMBED') +@conf +def get_python_variables(self,variables,imports=None): + if not imports: + try: + imports=self.python_imports + except AttributeError: + imports=DISTUTILS_IMP + program=list(imports) + program.append('') + for v in variables: + program.append("print(repr(%s))"%v) + os_env=dict(os.environ) + try: + del os_env['MACOSX_DEPLOYMENT_TARGET'] + except KeyError: + pass + try: + out=self.cmd_and_log(self.env.PYTHON+['-c','\n'.join(program)],env=os_env) + except Errors.WafError: + self.fatal('The distutils module is unusable: install "python-devel"?') + self.to_log(out) + return_values=[] + for s in out.split('\n'): + s=s.strip() + if not s: + continue + if s=='None': + return_values.append(None) + elif(s[0]=="'"and s[-1]=="'")or(s[0]=='"'and s[-1]=='"'): + return_values.append(eval(s)) + elif s[0].isdigit(): + return_values.append(int(s)) + else:break + return return_values +@conf +def check_python_headers(conf): + env=conf.env + if not env['CC_NAME']and not env['CXX_NAME']: + conf.fatal('load a compiler first (gcc, g++, ..)') + if not env['PYTHON_VERSION']: + conf.check_python_version() + pybin=conf.env.PYTHON + if not pybin: + conf.fatal('Could not find the python executable') + v='prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split() + try: + lst=conf.get_python_variables(["get_config_var('%s') or ''"%x for x in v]) + except RuntimeError: + conf.fatal("Python development headers not found (-v for details).") + vals=['%s = %r'%(x,y)for(x,y)in zip(v,lst)] + conf.to_log("Configuration returned from %r:\n%r\n"%(pybin,'\n'.join(vals))) + dct=dict(zip(v,lst)) + x='MACOSX_DEPLOYMENT_TARGET' + if dct[x]: + conf.env[x]=conf.environ[x]=dct[x] + env['pyext_PATTERN']='%s'+dct['SO'] + all_flags=dct['LDFLAGS']+' '+dct['CFLAGS'] + conf.parse_flags(all_flags,'PYEMBED') + all_flags=dct['LDFLAGS']+' '+dct['LDSHARED']+' '+dct['CFLAGS'] + conf.parse_flags(all_flags,'PYEXT') + result=None + if not dct["LDVERSION"]: + dct["LDVERSION"]=env['PYTHON_VERSION'] + for name in('python'+dct['LDVERSION'],'python'+env['PYTHON_VERSION']+'m','python'+env['PYTHON_VERSION'].replace('.','')): + if not result and env['LIBPATH_PYEMBED']: + path=env['LIBPATH_PYEMBED'] + conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n"%path) + result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in LIBPATH_PYEMBED'%name) + if not result and dct['LIBDIR']: + path=[dct['LIBDIR']] + conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n"%path) + result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in LIBDIR'%name) + if not result and dct['LIBPL']: + path=[dct['LIBPL']] + conf.to_log("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n") + result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in python_LIBPL'%name) + if not result: + path=[os.path.join(dct['prefix'],"libs")] + conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n") + result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in $prefix/libs'%name) + if result: + break + if result: + 
env['LIBPATH_PYEMBED']=path + env.append_value('LIB_PYEMBED',[name]) + else: + conf.to_log("\n\n### LIB NOT FOUND\n") + if(Utils.is_win32 or sys.platform.startswith('os2')or dct['Py_ENABLE_SHARED']): + env['LIBPATH_PYEXT']=env['LIBPATH_PYEMBED'] + env['LIB_PYEXT']=env['LIB_PYEMBED'] + num='.'.join(env['PYTHON_VERSION'].split('.')[:2]) + conf.find_program([''.join(pybin)+'-config','python%s-config'%num,'python-config-%s'%num,'python%sm-config'%num],var='PYTHON_CONFIG',mandatory=False) + includes=[] + if conf.env.PYTHON_CONFIG: + for incstr in conf.cmd_and_log([conf.env.PYTHON_CONFIG,'--includes']).strip().split(): + if(incstr.startswith('-I')or incstr.startswith('/I')): + incstr=incstr[2:] + if incstr not in includes: + includes.append(incstr) + conf.to_log("Include path for Python extensions (found via python-config --includes): %r\n"%(includes,)) + env['INCLUDES_PYEXT']=includes + env['INCLUDES_PYEMBED']=includes + else: + conf.to_log("Include path for Python extensions ""(found via distutils module): %r\n"%(dct['INCLUDEPY'],)) + env['INCLUDES_PYEXT']=[dct['INCLUDEPY']] + env['INCLUDES_PYEMBED']=[dct['INCLUDEPY']] + if env['CC_NAME']=='gcc': + env.append_value('CFLAGS_PYEMBED',['-fno-strict-aliasing']) + env.append_value('CFLAGS_PYEXT',['-fno-strict-aliasing']) + if env['CXX_NAME']=='gcc': + env.append_value('CXXFLAGS_PYEMBED',['-fno-strict-aliasing']) + env.append_value('CXXFLAGS_PYEXT',['-fno-strict-aliasing']) + if env.CC_NAME=="msvc": + from distutils.msvccompiler import MSVCCompiler + dist_compiler=MSVCCompiler() + dist_compiler.initialize() + env.append_value('CFLAGS_PYEXT',dist_compiler.compile_options) + env.append_value('CXXFLAGS_PYEXT',dist_compiler.compile_options) + env.append_value('LINKFLAGS_PYEXT',dist_compiler.ldflags_shared) + try: + conf.check(header_name='Python.h',define_name='HAVE_PYTHON_H',uselib='PYEMBED',fragment=FRAG,errmsg=':-(') + except conf.errors.ConfigurationError: + xx=conf.env.CXX_NAME and'cxx'or'c' + flags=['--cflags','--libs','--ldflags'] + for f in flags: + conf.check_cfg(msg='Asking python-config for pyembed %s flags'%f,path=conf.env.PYTHON_CONFIG,package='',uselib_store='PYEMBED',args=[f]) + conf.check(header_name='Python.h',define_name='HAVE_PYTHON_H',msg='Getting pyembed flags from python-config',fragment=FRAG,errmsg='Could not build a python embedded interpreter',features='%s %sprogram pyembed'%(xx,xx)) + for f in flags: + conf.check_cfg(msg='Asking python-config for pyext %s flags'%f,path=conf.env.PYTHON_CONFIG,package='',uselib_store='PYEXT',args=[f]) + conf.check(header_name='Python.h',define_name='HAVE_PYTHON_H',msg='Getting pyext flags from python-config',features='%s %sshlib pyext'%(xx,xx),fragment=FRAG,errmsg='Could not build python extensions') +@conf +def check_python_version(conf,minver=None): + assert minver is None or isinstance(minver,tuple) + pybin=conf.env['PYTHON'] + if not pybin: + conf.fatal('could not find the python executable') + cmd=pybin+['-c','import sys\nfor x in sys.version_info: print(str(x))'] + Logs.debug('python: Running python command %r'%cmd) + lines=conf.cmd_and_log(cmd).split() + assert len(lines)==5,"found %i lines, expected 5: %r"%(len(lines),lines) + pyver_tuple=(int(lines[0]),int(lines[1]),int(lines[2]),lines[3],int(lines[4])) + result=(minver is None)or(pyver_tuple>=minver) + if result: + pyver='.'.join([str(x)for x in pyver_tuple[:2]]) + conf.env['PYTHON_VERSION']=pyver + if'PYTHONDIR'in conf.environ: + pydir=conf.environ['PYTHONDIR'] + else: + if Utils.is_win32: + 
(python_LIBDEST,pydir)=conf.get_python_variables(["get_config_var('LIBDEST') or ''","get_python_lib(standard_lib=0, prefix=%r) or ''"%conf.env['PREFIX']]) + else: + python_LIBDEST=None + (pydir,)=conf.get_python_variables(["get_python_lib(standard_lib=0, prefix=%r) or ''"%conf.env['PREFIX']]) + if python_LIBDEST is None: + if conf.env['LIBDIR']: + python_LIBDEST=os.path.join(conf.env['LIBDIR'],"python"+pyver) + else: + python_LIBDEST=os.path.join(conf.env['PREFIX'],"lib","python"+pyver) + if'PYTHONARCHDIR'in conf.environ: + pyarchdir=conf.environ['PYTHONARCHDIR'] + else: + (pyarchdir,)=conf.get_python_variables(["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''"%conf.env['PREFIX']]) + if not pyarchdir: + pyarchdir=pydir + if hasattr(conf,'define'): + conf.define('PYTHONDIR',pydir) + conf.define('PYTHONARCHDIR',pyarchdir) + conf.env['PYTHONDIR']=pydir + conf.env['PYTHONARCHDIR']=pyarchdir + pyver_full='.'.join(map(str,pyver_tuple[:3])) + if minver is None: + conf.msg('Checking for python version',pyver_full) + else: + minver_str='.'.join(map(str,minver)) + conf.msg('Checking for python version',pyver_tuple,">= %s"%(minver_str,)and'GREEN'or'YELLOW') + if not result: + conf.fatal('The python version is too old, expecting %r'%(minver,)) +PYTHON_MODULE_TEMPLATE=''' +import %s as current_module +version = getattr(current_module, '__version__', None) +if version is not None: + print(str(version)) +else: + print('unknown version') +''' +@conf +def check_python_module(conf,module_name,condition=''): + msg='Python module %s'%module_name + if condition: + msg='%s (%s)'%(msg,condition) + conf.start_msg(msg) + try: + ret=conf.cmd_and_log(conf.env['PYTHON']+['-c',PYTHON_MODULE_TEMPLATE%module_name]) + except Exception: + conf.end_msg(False) + conf.fatal('Could not find the python module %r'%module_name) + ret=ret.strip() + if condition: + conf.end_msg(ret) + if ret=='unknown version': + conf.fatal('Could not check the %s version'%module_name) + from distutils.version import LooseVersion + def num(*k): + if isinstance(k[0],int): + return LooseVersion('.'.join([str(x)for x in k])) + else: + return LooseVersion(k[0]) + d={'num':num,'ver':LooseVersion(ret)} + ev=eval(condition,{},d) + if not ev: + conf.fatal('The %s version does not satisfy the requirements'%module_name) + else: + if ret=='unknown version': + conf.end_msg(True) + else: + conf.end_msg(ret) +def configure(conf): + try: + conf.find_program('python',var='PYTHON') + except conf.errors.ConfigurationError: + Logs.warn("could not find a python executable, setting to sys.executable '%s'"%sys.executable) + conf.env.PYTHON=sys.executable + if conf.env.PYTHON!=sys.executable: + Logs.warn("python executable %r differs from system %r"%(conf.env.PYTHON,sys.executable)) + conf.env.PYTHON=conf.cmd_to_list(conf.env.PYTHON) + v=conf.env + v['PYCMD']='"import sys, py_compile;py_compile.compile(sys.argv[1], sys.argv[2])"' + v['PYFLAGS']='' + v['PYFLAGS_OPT']='-O' + v['PYC']=getattr(Options.options,'pyc',1) + v['PYO']=getattr(Options.options,'pyo',1) +def options(opt): + opt.add_option('--nopyc',action='store_false',default=1,help='Do not install bytecode compiled .pyc files (configuration) [Default:install]',dest='pyc') + opt.add_option('--nopyo',action='store_false',default=1,help='Do not install optimised compiled .pyo files (configuration) [Default:install]',dest='pyo') diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/qt4.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/qt4.py new file mode 100644 index 
0000000..7926a89 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/qt4.py @@ -0,0 +1,455 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +try: + from xml.sax import make_parser + from xml.sax.handler import ContentHandler +except ImportError: + has_xml=False + ContentHandler=object +else: + has_xml=True +import os,sys +from waflib.Tools import c_preproc,cxx +from waflib import Task,Utils,Options,Errors +from waflib.TaskGen import feature,after_method,extension +from waflib.Configure import conf +from waflib import Logs +MOC_H=['.h','.hpp','.hxx','.hh'] +EXT_RCC=['.qrc'] +EXT_UI=['.ui'] +EXT_QT4=['.cpp','.cc','.cxx','.C'] +QT4_LIBS="QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtXmlPatterns QtWebKit Qt3Support QtHelp QtScript QtDeclarative QtDesigner" +class qxx(Task.classes['cxx']): + def __init__(self,*k,**kw): + Task.Task.__init__(self,*k,**kw) + self.moc_done=0 + def scan(self): + (nodes,names)=c_preproc.scan(self) + lst=[] + for x in nodes: + if x.name.endswith('.moc'): + s=x.path_from(self.inputs[0].parent.get_bld()) + if s not in names: + names.append(s) + else: + lst.append(x) + return(lst,names) + def runnable_status(self): + if self.moc_done: + return Task.Task.runnable_status(self) + else: + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + self.add_moc_tasks() + return Task.Task.runnable_status(self) + def create_moc_task(self,h_node,m_node): + try: + moc_cache=self.generator.bld.moc_cache + except AttributeError: + moc_cache=self.generator.bld.moc_cache={} + try: + return moc_cache[h_node] + except KeyError: + tsk=moc_cache[h_node]=Task.classes['moc'](env=self.env,generator=self.generator) + tsk.set_inputs(h_node) + tsk.set_outputs(m_node) + gen=self.generator.bld.producer + gen.outstanding.insert(0,tsk) + gen.total+=1 + return tsk + def add_moc_tasks(self): + node=self.inputs[0] + bld=self.generator.bld + try: + self.signature() + except KeyError: + pass + else: + delattr(self,'cache_sig') + moctasks=[] + mocfiles=[] + try: + tmp_lst=bld.raw_deps[self.uid()] + bld.raw_deps[self.uid()]=[] + except KeyError: + tmp_lst=[] + for d in tmp_lst: + if not d.endswith('.moc'): + continue + if d in mocfiles: + Logs.error("paranoia owns") + continue + mocfiles.append(d) + h_node=None + try:ext=Options.options.qt_header_ext.split() + except AttributeError:pass + if not ext:ext=MOC_H + base2=d[:-4] + for x in[node.parent]+self.generator.includes_nodes: + for e in ext: + h_node=x.find_node(base2+e) + if h_node: + break + if h_node: + m_node=h_node.change_ext('.moc') + break + else: + for k in EXT_QT4: + if base2.endswith(k): + for x in[node.parent]+self.generator.includes_nodes: + h_node=x.find_node(base2) + if h_node: + break + if h_node: + m_node=h_node.change_ext(k+'.moc') + break + if not h_node: + raise Errors.WafError('no header found for %r which is a moc file'%d) + bld.node_deps[(self.inputs[0].parent.abspath(),m_node.name)]=h_node + task=self.create_moc_task(h_node,m_node) + moctasks.append(task) + tmp_lst=bld.raw_deps[self.uid()]=mocfiles + lst=bld.node_deps.get(self.uid(),()) + for d in lst: + name=d.name + if name.endswith('.moc'): + task=self.create_moc_task(bld.node_deps[(self.inputs[0].parent.abspath(),name)],d) + moctasks.append(task) + self.run_after.update(set(moctasks)) + self.moc_done=1 + run=Task.classes['cxx'].__dict__['run'] +class trans_update(Task.Task): + run_str='${QT_LUPDATE} ${SRC} -ts 
${TGT}' + color='BLUE' +Task.update_outputs(trans_update) +class XMLHandler(ContentHandler): + def __init__(self): + self.buf=[] + self.files=[] + def startElement(self,name,attrs): + if name=='file': + self.buf=[] + def endElement(self,name): + if name=='file': + self.files.append(str(''.join(self.buf))) + def characters(self,cars): + self.buf.append(cars) +@extension(*EXT_RCC) +def create_rcc_task(self,node): + rcnode=node.change_ext('_rc.cpp') + rcctask=self.create_task('rcc',node,rcnode) + cpptask=self.create_task('cxx',rcnode,rcnode.change_ext('.o')) + try: + self.compiled_tasks.append(cpptask) + except AttributeError: + self.compiled_tasks=[cpptask] + return cpptask +@extension(*EXT_UI) +def create_uic_task(self,node): + uictask=self.create_task('ui4',node) + uictask.outputs=[self.path.find_or_declare(self.env['ui_PATTERN']%node.name[:-3])] +@extension('.ts') +def add_lang(self,node): + self.lang=self.to_list(getattr(self,'lang',[]))+[node] +@feature('qt4') +@after_method('apply_link') +def apply_qt4(self): + if getattr(self,'lang',None): + qmtasks=[] + for x in self.to_list(self.lang): + if isinstance(x,str): + x=self.path.find_resource(x+'.ts') + qmtasks.append(self.create_task('ts2qm',x,x.change_ext('.qm'))) + if getattr(self,'update',None)and Options.options.trans_qt4: + cxxnodes=[a.inputs[0]for a in self.compiled_tasks]+[a.inputs[0]for a in self.tasks if getattr(a,'inputs',None)and a.inputs[0].name.endswith('.ui')] + for x in qmtasks: + self.create_task('trans_update',cxxnodes,x.inputs) + if getattr(self,'langname',None): + qmnodes=[x.outputs[0]for x in qmtasks] + rcnode=self.langname + if isinstance(rcnode,str): + rcnode=self.path.find_or_declare(rcnode+'.qrc') + t=self.create_task('qm2rcc',qmnodes,rcnode) + k=create_rcc_task(self,t.outputs[0]) + self.link_task.inputs.append(k.outputs[0]) + lst=[] + for flag in self.to_list(self.env['CXXFLAGS']): + if len(flag)<2:continue + f=flag[0:2] + if f in['-D','-I','/D','/I']: + if(f[0]=='/'): + lst.append('-'+flag[1:]) + else: + lst.append(flag) + self.env.append_value('MOC_FLAGS',lst) +@extension(*EXT_QT4) +def cxx_hook(self,node): + return self.create_compiled_task('qxx',node) +class rcc(Task.Task): + color='BLUE' + run_str='${QT_RCC} -name ${SRC[0].name} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}' + ext_out=['.h'] + def scan(self): + node=self.inputs[0] + if not has_xml: + Logs.error('no xml support was found, the rcc dependencies will be incomplete!') + return([],[]) + parser=make_parser() + curHandler=XMLHandler() + parser.setContentHandler(curHandler) + fi=open(self.inputs[0].abspath(),'r') + try: + parser.parse(fi) + finally: + fi.close() + nodes=[] + names=[] + root=self.inputs[0].parent + for x in curHandler.files: + nd=root.find_resource(x) + if nd:nodes.append(nd) + else:names.append(x) + return(nodes,names) +class moc(Task.Task): + color='BLUE' + run_str='${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}' +class ui4(Task.Task): + color='BLUE' + run_str='${QT_UIC} ${SRC} -o ${TGT}' + ext_out=['.h'] +class ts2qm(Task.Task): + color='BLUE' + run_str='${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}' +class qm2rcc(Task.Task): + color='BLUE' + after='ts2qm' + def run(self): + txt='\n'.join(['<file>%s</file>'%k.path_from(self.outputs[0].parent)for k in self.inputs]) + code='<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>'%txt + self.outputs[0].write(code) +def configure(self): + self.find_qt4_binaries() + self.set_qt4_libs_to_check() + 
self.set_qt4_defines() + self.find_qt4_libraries() + self.add_qt4_rpath() + self.simplify_qt4_libs() +@conf +def find_qt4_binaries(self): + env=self.env + opt=Options.options + qtdir=getattr(opt,'qtdir','') + qtbin=getattr(opt,'qtbin','') + paths=[] + if qtdir: + qtbin=os.path.join(qtdir,'bin') + if not qtdir: + qtdir=os.environ.get('QT4_ROOT','') + qtbin=os.environ.get('QT4_BIN',None)or os.path.join(qtdir,'bin') + if qtbin: + paths=[qtbin] + if not qtdir: + paths=os.environ.get('PATH','').split(os.pathsep) + paths.append('/usr/share/qt4/bin/') + try: + lst=Utils.listdir('/usr/local/Trolltech/') + except OSError: + pass + else: + if lst: + lst.sort() + lst.reverse() + qtdir='/usr/local/Trolltech/%s/'%lst[0] + qtbin=os.path.join(qtdir,'bin') + paths.append(qtbin) + cand=None + prev_ver=['4','0','0'] + for qmk in['qmake-qt4','qmake4','qmake']: + try: + qmake=self.find_program(qmk,path_list=paths) + except self.errors.ConfigurationError: + pass + else: + try: + version=self.cmd_and_log([qmake,'-query','QT_VERSION']).strip() + except self.errors.WafError: + pass + else: + if version: + new_ver=version.split('.') + if new_ver>prev_ver: + cand=qmake + prev_ver=new_ver + if cand: + self.env.QMAKE=cand + else: + self.fatal('Could not find qmake for qt4') + qtbin=self.cmd_and_log([self.env.QMAKE,'-query','QT_INSTALL_BINS']).strip()+os.sep + def find_bin(lst,var): + if var in env: + return + for f in lst: + try: + ret=self.find_program(f,path_list=paths) + except self.errors.ConfigurationError: + pass + else: + env[var]=ret + break + find_bin(['uic-qt3','uic3'],'QT_UIC3') + find_bin(['uic-qt4','uic'],'QT_UIC') + if not env['QT_UIC']: + self.fatal('cannot find the uic compiler for qt4') + try: + uicver=self.cmd_and_log(env['QT_UIC']+" -version 2>&1").strip() + except self.errors.ConfigurationError: + self.fatal('this uic compiler is for qt3, add uic for qt4 to your path') + uicver=uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt','') + self.msg('Checking for uic version','%s'%uicver) + if uicver.find(' 3.')!=-1: + self.fatal('this uic compiler is for qt3, add uic for qt4 to your path') + find_bin(['moc-qt4','moc'],'QT_MOC') + find_bin(['rcc-qt4','rcc'],'QT_RCC') + find_bin(['lrelease-qt4','lrelease'],'QT_LRELEASE') + find_bin(['lupdate-qt4','lupdate'],'QT_LUPDATE') + env['UIC3_ST']='%s -o %s' + env['UIC_ST']='%s -o %s' + env['MOC_ST']='-o' + env['ui_PATTERN']='ui_%s.h' + env['QT_LRELEASE_FLAGS']=['-silent'] + env.MOCCPPPATH_ST='-I%s' + env.MOCDEFINES_ST='-D%s' +@conf +def find_qt4_libraries(self): + qtlibs=getattr(Options.options,'qtlibs',None)or os.environ.get("QT4_LIBDIR",None) + if not qtlibs: + try: + qtlibs=self.cmd_and_log([self.env.QMAKE,'-query','QT_INSTALL_LIBS']).strip() + except Errors.WafError: + qtdir=self.cmd_and_log([self.env.QMAKE,'-query','QT_INSTALL_PREFIX']).strip()+os.sep + qtlibs=os.path.join(qtdir,'lib') + self.msg('Found the Qt4 libraries in',qtlibs) + qtincludes=os.environ.get("QT4_INCLUDES",None)or self.cmd_and_log([self.env.QMAKE,'-query','QT_INSTALL_HEADERS']).strip() + env=self.env + if not'PKG_CONFIG_PATH'in os.environ: + os.environ['PKG_CONFIG_PATH']='%s:%s/pkgconfig:/usr/lib/qt4/lib/pkgconfig:/opt/qt4/lib/pkgconfig:/usr/lib/qt4/lib:/opt/qt4/lib'%(qtlibs,qtlibs) + try: + if os.environ.get("QT4_XCOMPILE",None): + raise self.errors.ConfigurationError() + self.check_cfg(atleast_pkgconfig_version='0.1') + except self.errors.ConfigurationError: + for i in self.qt4_vars: + uselib=i.upper() + if Utils.unversioned_sys_platform()=="darwin": 
+ frameworkName=i+".framework" + qtDynamicLib=os.path.join(qtlibs,frameworkName,i) + if os.path.exists(qtDynamicLib): + env.append_unique('FRAMEWORK_'+uselib,i) + self.msg('Checking for %s'%i,qtDynamicLib,'GREEN') + else: + self.msg('Checking for %s'%i,False,'YELLOW') + env.append_unique('INCLUDES_'+uselib,os.path.join(qtlibs,frameworkName,'Headers')) + elif env.DEST_OS!="win32": + qtDynamicLib=os.path.join(qtlibs,"lib"+i+".so") + qtStaticLib=os.path.join(qtlibs,"lib"+i+".a") + if os.path.exists(qtDynamicLib): + env.append_unique('LIB_'+uselib,i) + self.msg('Checking for %s'%i,qtDynamicLib,'GREEN') + elif os.path.exists(qtStaticLib): + env.append_unique('LIB_'+uselib,i) + self.msg('Checking for %s'%i,qtStaticLib,'GREEN') + else: + self.msg('Checking for %s'%i,False,'YELLOW') + env.append_unique('LIBPATH_'+uselib,qtlibs) + env.append_unique('INCLUDES_'+uselib,qtincludes) + env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i)) + else: + for k in("lib%s.a","lib%s4.a","%s.lib","%s4.lib"): + lib=os.path.join(qtlibs,k%i) + if os.path.exists(lib): + env.append_unique('LIB_'+uselib,i+k[k.find("%s")+2:k.find('.')]) + self.msg('Checking for %s'%i,lib,'GREEN') + break + else: + self.msg('Checking for %s'%i,False,'YELLOW') + env.append_unique('LIBPATH_'+uselib,qtlibs) + env.append_unique('INCLUDES_'+uselib,qtincludes) + env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i)) + uselib=i.upper()+"_debug" + for k in("lib%sd.a","lib%sd4.a","%sd.lib","%sd4.lib"): + lib=os.path.join(qtlibs,k%i) + if os.path.exists(lib): + env.append_unique('LIB_'+uselib,i+k[k.find("%s")+2:k.find('.')]) + self.msg('Checking for %s'%i,lib,'GREEN') + break + else: + self.msg('Checking for %s'%i,False,'YELLOW') + env.append_unique('LIBPATH_'+uselib,qtlibs) + env.append_unique('INCLUDES_'+uselib,qtincludes) + env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i)) + else: + for i in self.qt4_vars_debug+self.qt4_vars: + self.check_cfg(package=i,args='--cflags --libs',mandatory=False) +@conf +def simplify_qt4_libs(self): + env=self.env + def process_lib(vars_,coreval): + for d in vars_: + var=d.upper() + if var=='QTCORE': + continue + value=env['LIBPATH_'+var] + if value: + core=env[coreval] + accu=[] + for lib in value: + if lib in core: + continue + accu.append(lib) + env['LIBPATH_'+var]=accu + process_lib(self.qt4_vars,'LIBPATH_QTCORE') + process_lib(self.qt4_vars_debug,'LIBPATH_QTCORE_DEBUG') +@conf +def add_qt4_rpath(self): + env=self.env + if getattr(Options.options,'want_rpath',False): + def process_rpath(vars_,coreval): + for d in vars_: + var=d.upper() + value=env['LIBPATH_'+var] + if value: + core=env[coreval] + accu=[] + for lib in value: + if var!='QTCORE': + if lib in core: + continue + accu.append('-Wl,--rpath='+lib) + env['RPATH_'+var]=accu + process_rpath(self.qt4_vars,'LIBPATH_QTCORE') + process_rpath(self.qt4_vars_debug,'LIBPATH_QTCORE_DEBUG') +@conf +def set_qt4_libs_to_check(self): + if not hasattr(self,'qt4_vars'): + self.qt4_vars=QT4_LIBS + self.qt4_vars=Utils.to_list(self.qt4_vars) + if not hasattr(self,'qt4_vars_debug'): + self.qt4_vars_debug=[a+'_debug'for a in self.qt4_vars] + self.qt4_vars_debug=Utils.to_list(self.qt4_vars_debug) +@conf +def set_qt4_defines(self): + if sys.platform!='win32': + return + for x in self.qt4_vars: + y=x[2:].upper() + self.env.append_unique('DEFINES_%s'%x.upper(),'QT_%s_LIB'%y) + self.env.append_unique('DEFINES_%s_DEBUG'%x.upper(),'QT_%s_LIB'%y) +def options(opt): + 
opt.add_option('--want-rpath',action='store_true',default=False,dest='want_rpath',help='enable the rpath for qt libraries') + opt.add_option('--header-ext',type='string',default='',help='header extension for moc files',dest='qt_header_ext') + for i in'qtdir qtbin qtlibs'.split(): + opt.add_option('--'+i,type='string',default='',dest=i) + opt.add_option('--translate',action="store_true",help="collect translation strings",dest="trans_qt4",default=False) diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ruby.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ruby.py new file mode 100644 index 0000000..04cddfb --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/ruby.py @@ -0,0 +1,103 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os +from waflib import Task,Options,Utils +from waflib.TaskGen import before_method,feature,after_method,Task,extension +from waflib.Configure import conf +@feature('rubyext') +@before_method('apply_incpaths','apply_lib_vars','apply_bundle','apply_link') +def init_rubyext(self): + self.install_path='${ARCHDIR_RUBY}' + self.uselib=self.to_list(getattr(self,'uselib','')) + if not'RUBY'in self.uselib: + self.uselib.append('RUBY') + if not'RUBYEXT'in self.uselib: + self.uselib.append('RUBYEXT') +@feature('rubyext') +@before_method('apply_link','propagate_uselib') +def apply_ruby_so_name(self): + self.env['cshlib_PATTERN']=self.env['cxxshlib_PATTERN']=self.env['rubyext_PATTERN'] +@conf +def check_ruby_version(self,minver=()): + if Options.options.rubybinary: + self.env.RUBY=Options.options.rubybinary + else: + self.find_program('ruby',var='RUBY') + ruby=self.env.RUBY + try: + version=self.cmd_and_log([ruby,'-e','puts defined?(VERSION) ? 
VERSION : RUBY_VERSION']).strip() + except Exception: + self.fatal('could not determine ruby version') + self.env.RUBY_VERSION=version + try: + ver=tuple(map(int,version.split("."))) + except Exception: + self.fatal('unsupported ruby version %r'%version) + cver='' + if minver: + if ver<minver: + self.fatal('ruby is too old %r'%ver) + cver='.'.join([str(x)for x in minver]) + else: + cver=ver + self.msg('Checking for ruby version %s'%str(minver or''),cver) +@conf +def check_ruby_ext_devel(self): + if not self.env.RUBY: + self.fatal('ruby detection is required first') + if not self.env.CC_NAME and not self.env.CXX_NAME: + self.fatal('load a c/c++ compiler first') + version=tuple(map(int,self.env.RUBY_VERSION.split("."))) + def read_out(cmd): + return Utils.to_list(self.cmd_and_log([self.env.RUBY,'-rrbconfig','-e',cmd])) + def read_config(key): + return read_out('puts Config::CONFIG[%r]'%key) + ruby=self.env['RUBY'] + archdir=read_config('archdir') + cpppath=archdir + if version>=(1,9,0): + ruby_hdrdir=read_config('rubyhdrdir') + cpppath+=ruby_hdrdir + cpppath+=[os.path.join(ruby_hdrdir[0],read_config('arch')[0])] + self.check(header_name='ruby.h',includes=cpppath,errmsg='could not find ruby header file') + self.env.LIBPATH_RUBYEXT=read_config('libdir') + self.env.LIBPATH_RUBYEXT+=archdir + self.env.INCLUDES_RUBYEXT=cpppath + self.env.CFLAGS_RUBYEXT=read_config('CCDLFLAGS') + self.env.rubyext_PATTERN='%s.'+read_config('DLEXT')[0] + flags=read_config('LDSHARED') + while flags and flags[0][0]!='-': + flags=flags[1:] + if len(flags)>1 and flags[1]=="ppc": + flags=flags[2:] + self.env.LINKFLAGS_RUBYEXT=flags + self.env.LINKFLAGS_RUBYEXT+=read_config('LIBS') + self.env.LINKFLAGS_RUBYEXT+=read_config('LIBRUBYARG_SHARED') + if Options.options.rubyarchdir: + self.env.ARCHDIR_RUBY=Options.options.rubyarchdir + else: + self.env.ARCHDIR_RUBY=read_config('sitearchdir')[0] + if Options.options.rubylibdir: + self.env.LIBDIR_RUBY=Options.options.rubylibdir + else: + self.env.LIBDIR_RUBY=read_config('sitelibdir')[0] +@conf +def check_ruby_module(self,module_name): + self.start_msg('Ruby module %s'%module_name) + try: + self.cmd_and_log([self.env['RUBY'],'-e','require \'%s\';puts 1'%module_name]) + except Exception: + self.end_msg(False) + self.fatal('Could not find the ruby module %r'%module_name) + self.end_msg(True) +@extension('.rb') +def process(self,node): + tsk=self.create_task('run_ruby',node) +class run_ruby(Task.Task): + run_str='${RUBY} ${RBFLAGS} -I ${SRC[0].parent.abspath()} ${SRC}' +def options(opt): + opt.add_option('--with-ruby-archdir',type='string',dest='rubyarchdir',help='Specify directory where to install arch specific files') + opt.add_option('--with-ruby-libdir',type='string',dest='rubylibdir',help='Specify alternate ruby library path') + opt.add_option('--with-ruby-binary',type='string',dest='rubybinary',help='Specify alternate ruby binary') diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/suncc.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/suncc.py new file mode 100644 index 0000000..48f4943 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/suncc.py @@ -0,0 +1,54 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os +from waflib import Utils +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_scc(conf): + v=conf.env + cc=None + if v['CC']:cc=v['CC'] + elif'CC'in conf.environ:cc=conf.environ['CC'] + if not cc:cc=conf.find_program('cc',var='CC') + if not cc:conf.fatal('Could not find a Sun C compiler') + cc=conf.cmd_to_list(cc) + try: + conf.cmd_and_log(cc+['-flags']) + except Exception: + conf.fatal('%r is not a Sun compiler'%cc) + v['CC']=cc + v['CC_NAME']='sun' + conf.get_suncc_version(cc) +@conf +def scc_common_flags(conf): + v=conf.env + v['CC_SRC_F']=[] + v['CC_TGT_F']=['-c','-o'] + if not v['LINK_CC']:v['LINK_CC']=v['CC'] + v['CCLNK_SRC_F']='' + v['CCLNK_TGT_F']=['-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['SONAME_ST']='-Wl,-h,%s' + v['SHLIB_MARKER']='-Bdynamic' + v['STLIB_MARKER']='-Bstatic' + v['cprogram_PATTERN']='%s' + v['CFLAGS_cshlib']=['-Kpic','-DPIC'] + v['LINKFLAGS_cshlib']=['-G'] + v['cshlib_PATTERN']='lib%s.so' + v['LINKFLAGS_cstlib']=['-Bstatic'] + v['cstlib_PATTERN']='lib%s.a' +def configure(conf): + conf.find_scc() + conf.find_ar() + conf.scc_common_flags() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/suncxx.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/suncxx.py new file mode 100644 index 0000000..1736c2d --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/suncxx.py @@ -0,0 +1,55 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os +from waflib import Utils +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_sxx(conf): + v=conf.env + cc=None + if v['CXX']:cc=v['CXX'] + elif'CXX'in conf.environ:cc=conf.environ['CXX'] + if not cc:cc=conf.find_program('CC',var='CXX') + if not cc:cc=conf.find_program('c++',var='CXX') + if not cc:conf.fatal('Could not find a Sun C++ compiler') + cc=conf.cmd_to_list(cc) + try: + conf.cmd_and_log(cc+['-flags']) + except Exception: + conf.fatal('%r is not a Sun compiler'%cc) + v['CXX']=cc + v['CXX_NAME']='sun' + conf.get_suncc_version(cc) +@conf +def sxx_common_flags(conf): + v=conf.env + v['CXX_SRC_F']=[] + v['CXX_TGT_F']=['-c','-o'] + if not v['LINK_CXX']:v['LINK_CXX']=v['CXX'] + v['CXXLNK_SRC_F']=[] + v['CXXLNK_TGT_F']=['-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['SONAME_ST']='-Wl,-h,%s' + v['SHLIB_MARKER']='-Bdynamic' + v['STLIB_MARKER']='-Bstatic' + v['cxxprogram_PATTERN']='%s' + v['CXXFLAGS_cxxshlib']=['-Kpic','-DPIC'] + v['LINKFLAGS_cxxshlib']=['-G'] + v['cxxshlib_PATTERN']='lib%s.so' + v['LINKFLAGS_cxxstlib']=['-Bstatic'] + v['cxxstlib_PATTERN']='lib%s.a' +def configure(conf): + conf.find_sxx() + conf.find_ar() + conf.sxx_common_flags() + conf.cxx_load_tools() + conf.cxx_add_flags() + conf.link_add_flags() diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/tex.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/tex.py new file mode 100644 index 0000000..7b17923 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/tex.py @@ -0,0 +1,253 @@ +#! 
/usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,re +from waflib import Utils,Task,Errors,Logs +from waflib.TaskGen import feature,before_method +re_bibunit=re.compile(r'\\(?P<type>putbib)\[(?P<file>[^\[\]]*)\]',re.M) +def bibunitscan(self): + node=self.inputs[0] + nodes=[] + if not node:return nodes + code=node.read() + for match in re_bibunit.finditer(code): + path=match.group('file') + if path: + for k in['','.bib']: + Logs.debug('tex: trying %s%s'%(path,k)) + fi=node.parent.find_resource(path+k) + if fi: + nodes.append(fi) + else: + Logs.debug('tex: could not find %s'%path) + Logs.debug("tex: found the following bibunit files: %s"%nodes) + return nodes +exts_deps_tex=['','.ltx','.tex','.bib','.pdf','.png','.eps','.ps'] +exts_tex=['.ltx','.tex'] +re_tex=re.compile(r'\\(?P<type>include|bibliography|putbib|includegraphics|input|import|bringin|lstinputlisting)(\[[^\[\]]*\])?{(?P<file>[^{}]*)}',re.M) +g_bibtex_re=re.compile('bibdata',re.M) +class tex(Task.Task): + bibtex_fun,_=Task.compile_fun('${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}',shell=False) + bibtex_fun.__doc__=""" + Execute the program **bibtex** + """ + makeindex_fun,_=Task.compile_fun('${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}',shell=False) + makeindex_fun.__doc__=""" + Execute the program **makeindex** + """ + def exec_command(self,cmd,**kw): + bld=self.generator.bld + try: + if not kw.get('cwd',None): + kw['cwd']=bld.cwd + except AttributeError: + bld.cwd=kw['cwd']=bld.variant_dir + return Utils.subprocess.Popen(cmd,**kw).wait() + def scan_aux(self,node): + nodes=[node] + re_aux=re.compile(r'\\@input{(?P<file>[^{}]*)}',re.M) + def parse_node(node): + code=node.read() + for match in re_aux.finditer(code): + path=match.group('file') + found=node.parent.find_or_declare(path) + if found and found not in nodes: + Logs.debug('tex: found aux node '+found.abspath()) + nodes.append(found) + parse_node(found) + parse_node(node) + return nodes + def scan(self): + node=self.inputs[0] + nodes=[] + names=[] + seen=[] + if not node:return(nodes,names) + def parse_node(node): + if node in seen: + return + seen.append(node) + code=node.read() + global re_tex + for match in re_tex.finditer(code): + for path in match.group('file').split(','): + if path: + add_name=True + found=None + for k in exts_deps_tex: + Logs.debug('tex: trying %s%s'%(path,k)) + found=node.parent.find_resource(path+k) + for tsk in self.generator.tasks: + if not found or found in tsk.outputs: + break + else: + nodes.append(found) + add_name=False + for ext in exts_tex: + if found.name.endswith(ext): + parse_node(found) + break + if add_name: + names.append(path) + parse_node(node) + for x in nodes: + x.parent.get_bld().mkdir() + Logs.debug("tex: found the following : %s and names %s"%(nodes,names)) + return(nodes,names) + def check_status(self,msg,retcode): + if retcode!=0: + raise Errors.WafError("%r command exit status %r"%(msg,retcode)) + def bibfile(self): + for aux_node in self.aux_nodes: + try: + ct=aux_node.read() + except(OSError,IOError): + Logs.error('Error reading %s: %r'%aux_node.abspath()) + continue + if g_bibtex_re.findall(ct): + Logs.warn('calling bibtex') + self.env.env={} + self.env.env.update(os.environ) + self.env.env.update({'BIBINPUTS':self.TEXINPUTS,'BSTINPUTS':self.TEXINPUTS}) + self.env.SRCFILE=aux_node.name[:-4] + self.check_status('error when calling bibtex',self.bibtex_fun()) + def bibunits(self): + try: + bibunits=bibunitscan(self) + except 
OSError: + Logs.error('error bibunitscan') + else: + if bibunits: + fn=['bu'+str(i)for i in xrange(1,len(bibunits)+1)] + if fn: + Logs.warn('calling bibtex on bibunits') + for f in fn: + self.env.env={'BIBINPUTS':self.TEXINPUTS,'BSTINPUTS':self.TEXINPUTS} + self.env.SRCFILE=f + self.check_status('error when calling bibtex',self.bibtex_fun()) + def makeindex(self): + try: + idx_path=self.idx_node.abspath() + os.stat(idx_path) + except OSError: + Logs.warn('index file %s absent, not calling makeindex'%idx_path) + else: + Logs.warn('calling makeindex') + self.env.SRCFILE=self.idx_node.name + self.env.env={} + self.check_status('error when calling makeindex %s'%idx_path,self.makeindex_fun()) + def bibtopic(self): + p=self.inputs[0].parent.get_bld() + if os.path.exists(os.path.join(p.abspath(),'btaux.aux')): + self.aux_nodes+=p.ant_glob('*[0-9].aux') + def run(self): + env=self.env + if not env['PROMPT_LATEX']: + env.append_value('LATEXFLAGS','-interaction=batchmode') + env.append_value('PDFLATEXFLAGS','-interaction=batchmode') + env.append_value('XELATEXFLAGS','-interaction=batchmode') + fun=self.texfun + node=self.inputs[0] + srcfile=node.abspath() + texinputs=self.env.TEXINPUTS or'' + self.TEXINPUTS=node.parent.get_bld().abspath()+os.pathsep+node.parent.get_src().abspath()+os.pathsep+texinputs+os.pathsep + self.cwd=self.inputs[0].parent.get_bld().abspath() + Logs.warn('first pass on %s'%self.__class__.__name__) + self.env.env={} + self.env.env.update(os.environ) + self.env.env.update({'TEXINPUTS':self.TEXINPUTS}) + self.env.SRCFILE=srcfile + self.check_status('error when calling latex',fun()) + self.aux_nodes=self.scan_aux(node.change_ext('.aux')) + self.idx_node=node.change_ext('.idx') + self.bibtopic() + self.bibfile() + self.bibunits() + self.makeindex() + hash='' + for i in range(10): + prev_hash=hash + try: + hashes=[Utils.h_file(x.abspath())for x in self.aux_nodes] + hash=Utils.h_list(hashes) + except(OSError,IOError): + Logs.error('could not read aux.h') + pass + if hash and hash==prev_hash: + break + Logs.warn('calling %s'%self.__class__.__name__) + self.env.env={} + self.env.env.update(os.environ) + self.env.env.update({'TEXINPUTS':self.TEXINPUTS}) + self.env.SRCFILE=srcfile + self.check_status('error when calling %s'%self.__class__.__name__,fun()) +class latex(tex): + texfun,vars=Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}',shell=False) +class pdflatex(tex): + texfun,vars=Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}',shell=False) +class xelatex(tex): + texfun,vars=Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}',shell=False) +class dvips(Task.Task): + run_str='${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}' + color='BLUE' + after=['latex','pdflatex','xelatex'] +class dvipdf(Task.Task): + run_str='${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}' + color='BLUE' + after=['latex','pdflatex','xelatex'] +class pdf2ps(Task.Task): + run_str='${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}' + color='BLUE' + after=['latex','pdflatex','xelatex'] +@feature('tex') +@before_method('process_source') +def apply_tex(self): + if not getattr(self,'type',None)in['latex','pdflatex','xelatex']: + self.type='pdflatex' + tree=self.bld + outs=Utils.to_list(getattr(self,'outs',[])) + self.env['PROMPT_LATEX']=getattr(self,'prompt',1) + deps_lst=[] + if getattr(self,'deps',None): + deps=self.to_list(self.deps) + for filename in deps: + n=self.path.find_resource(filename) + if not n: + self.bld.fatal('Could not find %r for %r'%(filename,self)) + if not n in deps_lst: + deps_lst.append(n) + for node in 
self.to_nodes(self.source): + if self.type=='latex': + task=self.create_task('latex',node,node.change_ext('.dvi')) + elif self.type=='pdflatex': + task=self.create_task('pdflatex',node,node.change_ext('.pdf')) + elif self.type=='xelatex': + task=self.create_task('xelatex',node,node.change_ext('.pdf')) + task.env=self.env + if deps_lst: + for n in deps_lst: + if not n in task.dep_nodes: + task.dep_nodes.append(n) + v=dict(os.environ) + p=node.parent.abspath()+os.pathsep+self.path.abspath()+os.pathsep+self.path.get_bld().abspath()+os.pathsep+v.get('TEXINPUTS','')+os.pathsep + v['TEXINPUTS']=p + if self.type=='latex': + if'ps'in outs: + tsk=self.create_task('dvips',task.outputs,node.change_ext('.ps')) + tsk.env.env=dict(v) + if'pdf'in outs: + tsk=self.create_task('dvipdf',task.outputs,node.change_ext('.pdf')) + tsk.env.env=dict(v) + elif self.type=='pdflatex': + if'ps'in outs: + self.create_task('pdf2ps',task.outputs,node.change_ext('.ps')) + self.source=[] +def configure(self): + v=self.env + for p in'tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split(): + try: + self.find_program(p,var=p.upper()) + except self.errors.ConfigurationError: + pass + v['DVIPSFLAGS']='-Ppdf' diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/vala.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/vala.py new file mode 100644 index 0000000..96248c1 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/vala.py @@ -0,0 +1,201 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os.path,shutil,re +from waflib import Context,Task,Utils,Logs,Options,Errors +from waflib.TaskGen import extension,taskgen_method +from waflib.Configure import conf +class valac(Task.Task): + vars=["VALAC","VALAC_VERSION","VALAFLAGS"] + ext_out=['.h'] + def run(self): + cmd=[self.env['VALAC']]+self.env['VALAFLAGS'] + cmd.extend([a.abspath()for a in self.inputs]) + ret=self.exec_command(cmd,cwd=self.outputs[0].parent.abspath()) + if ret: + return ret + for x in self.outputs: + if id(x.parent)!=id(self.outputs[0].parent): + shutil.move(self.outputs[0].parent.abspath()+os.sep+x.name,x.abspath()) + if self.generator.dump_deps_node: + self.generator.dump_deps_node.write('\n'.join(self.generator.packages)) + return ret +valac=Task.update_outputs(valac) +@taskgen_method +def init_vala_task(self): + self.profile=getattr(self,'profile','gobject') + if self.profile=='gobject': + self.uselib=Utils.to_list(getattr(self,'uselib',[])) + if not'GOBJECT'in self.uselib: + self.uselib.append('GOBJECT') + def addflags(flags): + self.env.append_value('VALAFLAGS',flags) + if self.profile: + addflags('--profile=%s'%self.profile) + if hasattr(self,'threading'): + if self.profile=='gobject': + if not'GTHREAD'in self.uselib: + self.uselib.append('GTHREAD') + else: + Logs.warn("Profile %s means no threading support"%self.profile) + self.threading=False + if self.threading: + addflags('--threading') + valatask=self.valatask + self.is_lib='cprogram'not in self.features + if self.is_lib: + addflags('--library=%s'%self.target) + h_node=self.path.find_or_declare('%s.h'%self.target) + valatask.outputs.append(h_node) + addflags('--header=%s'%h_node.name) + valatask.outputs.append(self.path.find_or_declare('%s.vapi'%self.target)) + if getattr(self,'gir',None): + gir_node=self.path.find_or_declare('%s.gir'%self.gir) + addflags('--gir=%s'%gir_node.name) + valatask.outputs.append(gir_node) 
+ self.vala_target_glib=getattr(self,'vala_target_glib',getattr(Options.options,'vala_target_glib',None)) + if self.vala_target_glib: + addflags('--target-glib=%s'%self.vala_target_glib) + addflags(['--define=%s'%x for x in getattr(self,'vala_defines',[])]) + packages_private=Utils.to_list(getattr(self,'packages_private',[])) + addflags(['--pkg=%s'%x for x in packages_private]) + def _get_api_version(): + api_version='1.0' + if hasattr(Context.g_module,'API_VERSION'): + version=Context.g_module.API_VERSION.split(".") + if version[0]=="0": + api_version="0."+version[1] + else: + api_version=version[0]+".0" + return api_version + self.includes=Utils.to_list(getattr(self,'includes',[])) + self.uselib=self.to_list(getattr(self,'uselib',[])) + valatask.install_path=getattr(self,'install_path','') + valatask.vapi_path=getattr(self,'vapi_path','${DATAROOTDIR}/vala/vapi') + valatask.pkg_name=getattr(self,'pkg_name',self.env['PACKAGE']) + valatask.header_path=getattr(self,'header_path','${INCLUDEDIR}/%s-%s'%(valatask.pkg_name,_get_api_version())) + valatask.install_binding=getattr(self,'install_binding',True) + self.packages=packages=Utils.to_list(getattr(self,'packages',[])) + self.vapi_dirs=vapi_dirs=Utils.to_list(getattr(self,'vapi_dirs',[])) + includes=[] + if hasattr(self,'use'): + local_packages=Utils.to_list(self.use)[:] + seen=[] + while len(local_packages)>0: + package=local_packages.pop() + if package in seen: + continue + seen.append(package) + try: + package_obj=self.bld.get_tgen_by_name(package) + except Errors.WafError: + continue + package_name=package_obj.target + package_node=package_obj.path + package_dir=package_node.path_from(self.path) + for task in package_obj.tasks: + for output in task.outputs: + if output.name==package_name+".vapi": + valatask.set_run_after(task) + if package_name not in packages: + packages.append(package_name) + if package_dir not in vapi_dirs: + vapi_dirs.append(package_dir) + if package_dir not in includes: + includes.append(package_dir) + if hasattr(package_obj,'use'): + lst=self.to_list(package_obj.use) + lst.reverse() + local_packages=[pkg for pkg in lst if pkg not in seen]+local_packages + addflags(['--pkg=%s'%p for p in packages]) + for vapi_dir in vapi_dirs: + v_node=self.path.find_dir(vapi_dir) + if not v_node: + Logs.warn('Unable to locate Vala API directory: %r'%vapi_dir) + else: + addflags('--vapidir=%s'%v_node.abspath()) + addflags('--vapidir=%s'%v_node.get_bld().abspath()) + self.dump_deps_node=None + if self.is_lib and self.packages: + self.dump_deps_node=self.path.find_or_declare('%s.deps'%self.target) + valatask.outputs.append(self.dump_deps_node) + self.includes.append(self.bld.srcnode.abspath()) + self.includes.append(self.bld.bldnode.abspath()) + for include in includes: + try: + self.includes.append(self.path.find_dir(include).abspath()) + self.includes.append(self.path.find_dir(include).get_bld().abspath()) + except AttributeError: + Logs.warn("Unable to locate include directory: '%s'"%include) + if self.is_lib and valatask.install_binding: + headers_list=[o for o in valatask.outputs if o.suffix()==".h"] + try: + self.install_vheader.source=headers_list + except AttributeError: + self.install_vheader=self.bld.install_files(valatask.header_path,headers_list,self.env) + vapi_list=[o for o in valatask.outputs if(o.suffix()in(".vapi",".deps"))] + try: + self.install_vapi.source=vapi_list + except AttributeError: + self.install_vapi=self.bld.install_files(valatask.vapi_path,vapi_list,self.env) + gir_list=[o for o in valatask.outputs if 
o.suffix()=='.gir'] + try: + self.install_gir.source=gir_list + except AttributeError: + self.install_gir=self.bld.install_files(getattr(self,'gir_path','${DATAROOTDIR}/gir-1.0'),gir_list,self.env) +@extension('.vala','.gs') +def vala_file(self,node): + try: + valatask=self.valatask + except AttributeError: + valatask=self.valatask=self.create_task('valac') + self.init_vala_task() + valatask.inputs.append(node) + c_node=node.change_ext('.c') + valatask.outputs.append(c_node) + self.source.append(c_node) +@conf +def find_valac(self,valac_name,min_version): + valac=self.find_program(valac_name,var='VALAC') + try: + output=self.cmd_and_log(valac+' --version') + except Exception: + valac_version=None + else: + ver=re.search(r'\d+.\d+.\d+',output).group(0).split('.') + valac_version=tuple([int(x)for x in ver]) + self.msg('Checking for %s version >= %r'%(valac_name,min_version),valac_version,valac_version and valac_version>=min_version) + if valac and valac_version<min_version: + self.fatal("%s version %r is too old, need >= %r"%(valac_name,valac_version,min_version)) + self.env['VALAC_VERSION']=valac_version + return valac +@conf +def check_vala(self,min_version=(0,8,0),branch=None): + if not branch: + branch=min_version[:2] + try: + find_valac(self,'valac-%d.%d'%(branch[0],branch[1]),min_version) + except self.errors.ConfigurationError: + find_valac(self,'valac',min_version) +@conf +def check_vala_deps(self): + if not self.env['HAVE_GOBJECT']: + pkg_args={'package':'gobject-2.0','uselib_store':'GOBJECT','args':'--cflags --libs'} + if getattr(Options.options,'vala_target_glib',None): + pkg_args['atleast_version']=Options.options.vala_target_glib + self.check_cfg(**pkg_args) + if not self.env['HAVE_GTHREAD']: + pkg_args={'package':'gthread-2.0','uselib_store':'GTHREAD','args':'--cflags --libs'} + if getattr(Options.options,'vala_target_glib',None): + pkg_args['atleast_version']=Options.options.vala_target_glib + self.check_cfg(**pkg_args) +def configure(self): + self.load('gnu_dirs') + self.check_vala_deps() + self.check_vala() + self.env.VALAFLAGS=['-C','--quiet'] +def options(opt): + opt.load('gnu_dirs') + valaopts=opt.add_option_group('Vala Compiler Options') + valaopts.add_option('--vala-target-glib',default=None,dest='vala_target_glib',metavar='MAJOR.MINOR',help='Target version of glib for Vala GObject code generation') diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/waf_unit_test.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/waf_unit_test.py new file mode 100644 index 0000000..3363172 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/waf_unit_test.py @@ -0,0 +1,97 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys +from waflib.TaskGen import feature,after_method +from waflib import Utils,Task,Logs,Options +testlock=Utils.threading.Lock() +@feature('test') +@after_method('apply_link') +def make_test(self): + if getattr(self,'link_task',None): + self.create_task('utest',self.link_task.outputs) +class utest(Task.Task): + color='PINK' + after=['vnum','inst'] + vars=[] + def runnable_status(self): + if getattr(Options.options,'no_tests',False): + return Task.SKIP_ME + ret=super(utest,self).runnable_status() + if ret==Task.SKIP_ME: + if getattr(Options.options,'all_tests',False): + return Task.RUN_ME + return ret + def run(self): + filename=self.inputs[0].abspath() + self.ut_exec=getattr(self.generator,'ut_exec',[filename]) + if getattr(self.generator,'ut_fun',None): + self.generator.ut_fun(self) + try: + fu=getattr(self.generator.bld,'all_test_paths') + except AttributeError: + fu=os.environ.copy() + lst=[] + for g in self.generator.bld.groups: + for tg in g: + if getattr(tg,'link_task',None): + s=tg.link_task.outputs[0].parent.abspath() + if s not in lst: + lst.append(s) + def add_path(dct,path,var): + dct[var]=os.pathsep.join(Utils.to_list(path)+[os.environ.get(var,'')]) + if Utils.is_win32: + add_path(fu,lst,'PATH') + elif Utils.unversioned_sys_platform()=='darwin': + add_path(fu,lst,'DYLD_LIBRARY_PATH') + add_path(fu,lst,'LD_LIBRARY_PATH') + else: + add_path(fu,lst,'LD_LIBRARY_PATH') + self.generator.bld.all_test_paths=fu + cwd=getattr(self.generator,'ut_cwd','')or self.inputs[0].parent.abspath() + testcmd=getattr(Options.options,'testcmd',False) + if testcmd: + self.ut_exec=(testcmd%self.ut_exec[0]).split(' ') + proc=Utils.subprocess.Popen(self.ut_exec,cwd=cwd,env=fu,stderr=Utils.subprocess.PIPE,stdout=Utils.subprocess.PIPE) + (stdout,stderr)=proc.communicate() + tup=(filename,proc.returncode,stdout,stderr) + self.generator.utest_result=tup + testlock.acquire() + try: + bld=self.generator.bld + Logs.debug("ut: %r",tup) + try: + bld.utest_results.append(tup) + except AttributeError: + bld.utest_results=[tup] + finally: + testlock.release() +def summary(bld): + lst=getattr(bld,'utest_results',[]) + if lst: + Logs.pprint('CYAN','execution summary') + total=len(lst) + tfail=len([x for x in lst if x[1]]) + Logs.pprint('CYAN',' tests that pass %d/%d'%(total-tfail,total)) + for(f,code,out,err)in lst: + if not code: + Logs.pprint('CYAN',' %s'%f) + Logs.pprint('CYAN',' tests that fail %d/%d'%(tfail,total)) + for(f,code,out,err)in lst: + if code: + Logs.pprint('CYAN',' %s'%f) +def set_exit_code(bld): + lst=getattr(bld,'utest_results',[]) + for(f,code,out,err)in lst: + if code: + msg=[] + if out: + msg.append('stdout:%s%s'%(os.linesep,out.decode('utf-8'))) + if err: + msg.append('stderr:%s%s'%(os.linesep,err.decode('utf-8'))) + bld.fatal(os.linesep.join(msg)) +def options(opt): + opt.add_option('--notests',action='store_true',default=False,help='Exec no unit tests',dest='no_tests') + opt.add_option('--alltests',action='store_true',default=False,help='Exec all unit tests',dest='all_tests') + opt.add_option('--testcmd',action='store',default=False,help='Run the unit tests using the test-cmd string'' example "--test-cmd="valgrind --error-exitcode=1'' %s" to run under valgrind',dest='testcmd') diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/winres.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/winres.py new file mode 100644 index 0000000..88904af --- /dev/null +++ 
b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/winres.py @@ -0,0 +1,85 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import re,traceback +from waflib import Task,Logs,Utils +from waflib.TaskGen import extension +from waflib.Tools import c_preproc +@extension('.rc') +def rc_file(self,node): + obj_ext='.rc.o' + if self.env['WINRC_TGT_F']=='/fo': + obj_ext='.res' + rctask=self.create_task('winrc',node,node.change_ext(obj_ext)) + try: + self.compiled_tasks.append(rctask) + except AttributeError: + self.compiled_tasks=[rctask] +re_lines=re.compile('(?:^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*?)\s*$)|''(?:^\w+[ \t]*(ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)[ \t]*(.*?)\s*$)',re.IGNORECASE|re.MULTILINE) +class rc_parser(c_preproc.c_parser): + def filter_comments(self,filepath): + code=Utils.readf(filepath) + if c_preproc.use_trigraphs: + for(a,b)in c_preproc.trig_def:code=code.split(a).join(b) + code=c_preproc.re_nl.sub('',code) + code=c_preproc.re_cpp.sub(c_preproc.repl,code) + ret=[] + for m in re.finditer(re_lines,code): + if m.group(2): + ret.append((m.group(2),m.group(3))) + else: + ret.append(('include',m.group(5))) + return ret + def addlines(self,node): + self.currentnode_stack.append(node.parent) + filepath=node.abspath() + self.count_files+=1 + if self.count_files>c_preproc.recursion_limit: + raise c_preproc.PreprocError("recursion limit exceeded") + pc=self.parse_cache + Logs.debug('preproc: reading file %r',filepath) + try: + lns=pc[filepath] + except KeyError: + pass + else: + self.lines.extend(lns) + return + try: + lines=self.filter_comments(filepath) + lines.append((c_preproc.POPFILE,'')) + lines.reverse() + pc[filepath]=lines + self.lines.extend(lines) + except IOError: + raise c_preproc.PreprocError("could not read the file %s"%filepath) + except Exception: + if Logs.verbose>0: + Logs.error("parsing %s failed"%filepath) + traceback.print_exc() +class winrc(Task.Task): + run_str='${WINRC} ${WINRCFLAGS} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}' + color='BLUE' + def scan(self): + tmp=rc_parser(self.generator.includes_nodes) + tmp.start(self.inputs[0],self.env) + nodes=tmp.nodes + names=tmp.names + if Logs.verbose: + Logs.debug('deps: deps for %s: %r; unresolved %r'%(str(self),nodes,names)) + return(nodes,names) +def configure(conf): + v=conf.env + v['WINRC_TGT_F']='-o' + v['WINRC_SRC_F']='-i' + if not conf.env.WINRC: + if v.CC_NAME=='msvc': + conf.find_program('RC',var='WINRC',path_list=v['PATH']) + v['WINRC_TGT_F']='/fo' + v['WINRC_SRC_F']='' + else: + conf.find_program('windres',var='WINRC',path_list=v['PATH']) + if not conf.env.WINRC: + conf.fatal('winrc was not found!') + v['WINRCFLAGS']=[] diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/xlc.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/xlc.py new file mode 100644 index 0000000..fbf0fcf --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/xlc.py @@ -0,0 +1,45 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_xlc(conf): + cc=conf.find_program(['xlc_r','xlc'],var='CC') + cc=conf.cmd_to_list(cc) + conf.get_xlc_version(cc) + conf.env.CC_NAME='xlc' + conf.env.CC=cc +@conf +def xlc_common_flags(conf): + v=conf.env + v['CC_SRC_F']=[] + v['CC_TGT_F']=['-c','-o'] + if not v['LINK_CC']:v['LINK_CC']=v['CC'] + v['CCLNK_SRC_F']=[] + v['CCLNK_TGT_F']=['-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['RPATH_ST']='-Wl,-rpath,%s' + v['SONAME_ST']=[] + v['SHLIB_MARKER']=[] + v['STLIB_MARKER']=[] + v['LINKFLAGS_cprogram']=['-Wl,-brtl'] + v['cprogram_PATTERN']='%s' + v['CFLAGS_cshlib']=['-fPIC'] + v['LINKFLAGS_cshlib']=['-G','-Wl,-brtl,-bexpfull'] + v['cshlib_PATTERN']='lib%s.so' + v['LINKFLAGS_cstlib']=[] + v['cstlib_PATTERN']='lib%s.a' +def configure(conf): + conf.find_xlc() + conf.find_ar() + conf.xlc_common_flags() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/xlcxx.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/xlcxx.py new file mode 100644 index 0000000..b7efb23 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Tools/xlcxx.py @@ -0,0 +1,45 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_xlcxx(conf): + cxx=conf.find_program(['xlc++_r','xlc++'],var='CXX') + cxx=conf.cmd_to_list(cxx) + conf.get_xlc_version(cxx) + conf.env.CXX_NAME='xlc++' + conf.env.CXX=cxx +@conf +def xlcxx_common_flags(conf): + v=conf.env + v['CXX_SRC_F']=[] + v['CXX_TGT_F']=['-c','-o'] + if not v['LINK_CXX']:v['LINK_CXX']=v['CXX'] + v['CXXLNK_SRC_F']=[] + v['CXXLNK_TGT_F']=['-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['RPATH_ST']='-Wl,-rpath,%s' + v['SONAME_ST']=[] + v['SHLIB_MARKER']=[] + v['STLIB_MARKER']=[] + v['LINKFLAGS_cxxprogram']=['-Wl,-brtl'] + v['cxxprogram_PATTERN']='%s' + v['CXXFLAGS_cxxshlib']=['-fPIC'] + v['LINKFLAGS_cxxshlib']=['-G','-Wl,-brtl,-bexpfull'] + v['cxxshlib_PATTERN']='lib%s.so' + v['LINKFLAGS_cxxstlib']=[] + v['cxxstlib_PATTERN']='lib%s.a' +def configure(conf): + conf.find_xlcxx() + conf.find_ar() + conf.xlcxx_common_flags() + conf.cxx_load_tools() + conf.cxx_add_flags() + conf.link_add_flags() diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Utils.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Utils.py new file mode 100644 index 0000000..548f5db --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/Utils.py @@ -0,0 +1,412 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os,sys,errno,traceback,inspect,re,shutil,datetime,gc +import subprocess +try: + from collections import deque +except ImportError: + class deque(list): + def popleft(self): + return self.pop(0) +try: + import _winreg as winreg +except ImportError: + try: + import winreg + except ImportError: + winreg=None +from waflib import Errors +try: + from collections import UserDict +except ImportError: + from UserDict import UserDict +try: + from hashlib import md5 +except ImportError: + try: + from md5 import md5 + except ImportError: + pass +try: + import threading +except ImportError: + class threading(object): + pass + class Lock(object): + def acquire(self): + pass + def release(self): + pass + threading.Lock=threading.Thread=Lock +else: + run_old=threading.Thread.run + def run(*args,**kwargs): + try: + run_old(*args,**kwargs) + except(KeyboardInterrupt,SystemExit): + raise + except Exception: + sys.excepthook(*sys.exc_info()) + threading.Thread.run=run +SIG_NIL='iluvcuteoverload'.encode() +O644=420 +O755=493 +rot_chr=['\\','|','/','-'] +rot_idx=0 +try: + from collections import defaultdict +except ImportError: + class defaultdict(dict): + def __init__(self,default_factory): + super(defaultdict,self).__init__() + self.default_factory=default_factory + def __getitem__(self,key): + try: + return super(defaultdict,self).__getitem__(key) + except KeyError: + value=self.default_factory() + self[key]=value + return value +is_win32=sys.platform in('win32','cli') +indicator='\x1b[K%s%s%s\r' +if is_win32 and'NOCOLOR'in os.environ: + indicator='%s%s%s\r' +def readf(fname,m='r',encoding='ISO8859-1'): + if sys.hexversion>0x3000000 and not'b'in m: + m+='b' + f=open(fname,m) + try: + txt=f.read() + finally: + f.close() + txt=txt.decode(encoding) + else: + f=open(fname,m) + try: + txt=f.read() + finally: + f.close() + return txt +def writef(fname,data,m='w',encoding='ISO8859-1'): + if sys.hexversion>0x3000000 and not'b'in m: + data=data.encode(encoding) + m+='b' + f=open(fname,m) + try: + f.write(data) + finally: + f.close() +def h_file(fname): + f=open(fname,'rb') + m=md5() + try: + while fname: + fname=f.read(200000) + m.update(fname) + finally: + f.close() + return m.digest() +if hasattr(os,'O_NOINHERIT')and sys.hexversion<0x3040000: + def readf_win32(f,m='r',encoding='ISO8859-1'): + flags=os.O_NOINHERIT|os.O_RDONLY + if'b'in m: + flags|=os.O_BINARY + if'+'in m: + flags|=os.O_RDWR + try: + fd=os.open(f,flags) + except OSError: + raise IOError('Cannot read from %r'%f) + if sys.hexversion>0x3000000 and not'b'in m: + m+='b' + f=os.fdopen(fd,m) + try: + txt=f.read() + finally: + f.close() + txt=txt.decode(encoding) + else: + f=os.fdopen(fd,m) + try: + txt=f.read() + finally: + f.close() + return txt + def writef_win32(f,data,m='w',encoding='ISO8859-1'): + if sys.hexversion>0x3000000 and not'b'in m: + data=data.encode(encoding) + m+='b' + flags=os.O_CREAT|os.O_TRUNC|os.O_WRONLY|os.O_NOINHERIT + if'b'in m: + flags|=os.O_BINARY + if'+'in m: + flags|=os.O_RDWR + try: + fd=os.open(f,flags) + except OSError: + raise IOError('Cannot write to %r'%f) + f=os.fdopen(fd,m) + try: + f.write(data) + finally: + f.close() + def h_file_win32(fname): + try: + fd=os.open(fname,os.O_BINARY|os.O_RDONLY|os.O_NOINHERIT) + except OSError: + raise IOError('Cannot read from %r'%fname) + f=os.fdopen(fd,'rb') + m=md5() + try: + while fname: + fname=f.read(200000) + m.update(fname) + finally: + f.close() + return m.digest() + readf_old=readf + 
writef_old=writef + h_file_old=h_file + readf=readf_win32 + writef=writef_win32 + h_file=h_file_win32 +try: + x=''.encode('hex') +except LookupError: + import binascii + def to_hex(s): + ret=binascii.hexlify(s) + if not isinstance(ret,str): + ret=ret.decode('utf-8') + return ret +else: + def to_hex(s): + return s.encode('hex') +to_hex.__doc__=""" +Return the hexadecimal representation of a string + +:param s: string to convert +:type s: string +""" +listdir=os.listdir +if is_win32: + def listdir_win32(s): + if not s: + try: + import ctypes + except ImportError: + return[x+':\\'for x in list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')] + else: + dlen=4 + maxdrives=26 + buf=ctypes.create_string_buffer(maxdrives*dlen) + ndrives=ctypes.windll.kernel32.GetLogicalDriveStringsA(maxdrives*dlen,ctypes.byref(buf)) + return[str(buf.raw[4*i:4*i+2].decode('ascii'))for i in range(int(ndrives/dlen))] + if len(s)==2 and s[1]==":": + s+=os.sep + if not os.path.isdir(s): + e=OSError('%s is not a directory'%s) + e.errno=errno.ENOENT + raise e + return os.listdir(s) + listdir=listdir_win32 +def num2ver(ver): + if isinstance(ver,str): + ver=tuple(ver.split('.')) + if isinstance(ver,tuple): + ret=0 + for i in range(4): + if i<len(ver): + ret+=256**(3-i)*int(ver[i]) + return ret + return ver +def ex_stack(): + exc_type,exc_value,tb=sys.exc_info() + exc_lines=traceback.format_exception(exc_type,exc_value,tb) + return''.join(exc_lines) +def to_list(sth): + if isinstance(sth,str): + return sth.split() + else: + return sth +re_nl=re.compile('\r*\n',re.M) +def str_to_dict(txt): + tbl={} + lines=re_nl.split(txt) + for x in lines: + x=x.strip() + if not x or x.startswith('#')or x.find('=')<0: + continue + tmp=x.split('=') + tbl[tmp[0].strip()]='='.join(tmp[1:]).strip() + return tbl +def split_path(path): + return path.split('/') +def split_path_cygwin(path): + if path.startswith('//'): + ret=path.split('/')[2:] + ret[0]='/'+ret[0] + return ret + return path.split('/') +re_sp=re.compile('[/\\\\]') +def split_path_win32(path): + if path.startswith('\\\\'): + ret=re.split(re_sp,path)[2:] + ret[0]='\\'+ret[0] + return ret + return re.split(re_sp,path) +if sys.platform=='cygwin': + split_path=split_path_cygwin +elif is_win32: + split_path=split_path_win32 +split_path.__doc__=""" +Split a path by / or \\. 
This function is not like os.path.split + +:type path: string +:param path: path to split +:return: list of strings +""" +def check_dir(path): + if not os.path.isdir(path): + try: + os.makedirs(path) + except OSError as e: + if not os.path.isdir(path): + raise Errors.WafError('Cannot create the folder %r'%path,ex=e) +def def_attrs(cls,**kw): + for k,v in kw.items(): + if not hasattr(cls,k): + setattr(cls,k,v) +def quote_define_name(s): + fu=re.compile("[^a-zA-Z0-9]").sub("_",s) + fu=fu.upper() + return fu +def h_list(lst): + m=md5() + m.update(str(lst).encode()) + return m.digest() +def h_fun(fun): + try: + return fun.code + except AttributeError: + try: + h=inspect.getsource(fun) + except IOError: + h="nocode" + try: + fun.code=h + except AttributeError: + pass + return h +reg_subst=re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}") +def subst_vars(expr,params): + def repl_var(m): + if m.group(1): + return'\\' + if m.group(2): + return'$' + try: + return params.get_flat(m.group(3)) + except AttributeError: + return params[m.group(3)] + return reg_subst.sub(repl_var,expr) +def destos_to_binfmt(key): + if key=='darwin': + return'mac-o' + elif key in('win32','cygwin','uwin','msys'): + return'pe' + return'elf' +def unversioned_sys_platform(): + s=sys.platform + if s=='java': + from java.lang import System + s=System.getProperty('os.name') + if s=='Mac OS X': + return'darwin' + elif s.startswith('Windows '): + return'win32' + elif s=='OS/2': + return'os2' + elif s=='HP-UX': + return'hpux' + elif s in('SunOS','Solaris'): + return'sunos' + else:s=s.lower() + if s=='powerpc': + return'darwin' + if s=='win32'or s.endswith('os2')and s!='sunos2':return s + return re.split('\d+$',s)[0] +def nada(*k,**kw): + pass +class Timer(object): + def __init__(self): + self.start_time=datetime.datetime.utcnow() + def __str__(self): + delta=datetime.datetime.utcnow()-self.start_time + days=int(delta.days) + hours=delta.seconds//3600 + minutes=(delta.seconds-hours*3600)//60 + seconds=delta.seconds-hours*3600-minutes*60+float(delta.microseconds)/1000/1000 + result='' + if days: + result+='%dd'%days + if days or hours: + result+='%dh'%hours + if days or hours or minutes: + result+='%dm'%minutes + return'%s%.3fs'%(result,seconds) +if is_win32: + old=shutil.copy2 + def copy2(src,dst): + old(src,dst) + shutil.copystat(src,dst) + setattr(shutil,'copy2',copy2) +if os.name=='java': + try: + gc.disable() + gc.enable() + except NotImplementedError: + gc.disable=gc.enable +def read_la_file(path): + sp=re.compile(r'^([^=]+)=\'(.*)\'$') + dc={} + for line in readf(path).splitlines(): + try: + _,left,right,_=sp.split(line.strip()) + dc[left]=right + except ValueError: + pass + return dc +def nogc(fun): + def f(*k,**kw): + try: + gc.disable() + ret=fun(*k,**kw) + finally: + gc.enable() + return ret + f.__doc__=fun.__doc__ + return f +def run_once(fun): + cache={} + def wrap(k): + try: + return cache[k] + except KeyError: + ret=fun(k) + cache[k]=ret + return ret + wrap.__cache__=cache + return wrap +def get_registry_app_path(key,filename): + if not winreg: + return None + try: + result=winreg.QueryValue(key,"Software\\Microsoft\\Windows\\CurrentVersion\\App Paths\\%s.exe"%filename[0]) + except WindowsError: + pass + else: + if os.path.isfile(result): + return result diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__init__.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__init__.py new file mode 100644 index 0000000..efeed79 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__init__.py @@ 
-0,0 +1,4 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Build.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Build.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..17341d8 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Build.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/ConfigSet.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/ConfigSet.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..91eb4c0 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/ConfigSet.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Configure.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Configure.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..d127961 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Configure.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Context.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Context.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..98bbb52 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Context.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Errors.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Errors.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..e5f193b --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Errors.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Logs.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Logs.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..c74e5cf --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Logs.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Node.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Node.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..e2fde07 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Node.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Options.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Options.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..637b45c --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Options.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Runner.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Runner.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..8644b04 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Runner.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Scripting.cpython-34.pyc 
b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Scripting.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..15e31f6 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Scripting.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Task.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Task.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..cdad7ad --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Task.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/TaskGen.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/TaskGen.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..698063c --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/TaskGen.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Utils.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Utils.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..f2a6f73 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/Utils.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/__init__.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/__init__.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..fcd5cf3 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/__init__.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/ansiterm.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/ansiterm.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..764e96f --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/__pycache__/ansiterm.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/ansiterm.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/ansiterm.py new file mode 100644 index 0000000..a959e12 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/ansiterm.py @@ -0,0 +1,191 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import sys,os +try: + if not(sys.stderr.isatty()and sys.stdout.isatty()): + raise ValueError('not a tty') + from ctypes import Structure,windll,c_short,c_ushort,c_ulong,c_int,byref,POINTER,c_long,c_wchar + class COORD(Structure): + _fields_=[("X",c_short),("Y",c_short)] + class SMALL_RECT(Structure): + _fields_=[("Left",c_short),("Top",c_short),("Right",c_short),("Bottom",c_short)] + class CONSOLE_SCREEN_BUFFER_INFO(Structure): + _fields_=[("Size",COORD),("CursorPosition",COORD),("Attributes",c_short),("Window",SMALL_RECT),("MaximumWindowSize",COORD)] + class CONSOLE_CURSOR_INFO(Structure): + _fields_=[('dwSize',c_ulong),('bVisible',c_int)] + windll.kernel32.GetStdHandle.argtypes=[c_ulong] + windll.kernel32.GetStdHandle.restype=c_ulong + windll.kernel32.GetConsoleScreenBufferInfo.argtypes=[c_ulong,POINTER(CONSOLE_SCREEN_BUFFER_INFO)] + windll.kernel32.GetConsoleScreenBufferInfo.restype=c_long + windll.kernel32.SetConsoleTextAttribute.argtypes=[c_ulong,c_ushort] + windll.kernel32.SetConsoleTextAttribute.restype=c_long + windll.kernel32.FillConsoleOutputCharacterW.argtypes=[c_ulong,c_wchar,c_ulong,POINTER(COORD),POINTER(c_ulong)] + windll.kernel32.FillConsoleOutputCharacterW.restype=c_long + windll.kernel32.FillConsoleOutputAttribute.argtypes=[c_ulong,c_ushort,c_ulong,POINTER(COORD),POINTER(c_ulong)] + windll.kernel32.FillConsoleOutputAttribute.restype=c_long + windll.kernel32.SetConsoleCursorPosition.argtypes=[c_ulong,POINTER(COORD)] + windll.kernel32.SetConsoleCursorPosition.restype=c_long + windll.kernel32.SetConsoleCursorInfo.argtypes=[c_ulong,POINTER(CONSOLE_CURSOR_INFO)] + windll.kernel32.SetConsoleCursorInfo.restype=c_long + sbinfo=CONSOLE_SCREEN_BUFFER_INFO() + csinfo=CONSOLE_CURSOR_INFO() + hconsole=windll.kernel32.GetStdHandle(-11) + windll.kernel32.GetConsoleScreenBufferInfo(hconsole,byref(sbinfo)) + if sbinfo.Size.X<9 or sbinfo.Size.Y<9:raise ValueError('small console') + windll.kernel32.GetConsoleCursorInfo(hconsole,byref(csinfo)) +except Exception: + pass +else: + import re,threading + is_vista=getattr(sys,"getwindowsversion",None)and sys.getwindowsversion()[0]>=6 + try: + _type=unicode + except NameError: + _type=str + to_int=lambda number,default:number and int(number)or default + wlock=threading.Lock() + STD_OUTPUT_HANDLE=-11 + STD_ERROR_HANDLE=-12 + class AnsiTerm(object): + def __init__(self): + self.encoding=sys.stdout.encoding + self.hconsole=windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE) + self.cursor_history=[] + self.orig_sbinfo=CONSOLE_SCREEN_BUFFER_INFO() + self.orig_csinfo=CONSOLE_CURSOR_INFO() + windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(self.orig_sbinfo)) + windll.kernel32.GetConsoleCursorInfo(hconsole,byref(self.orig_csinfo)) + def screen_buffer_info(self): + sbinfo=CONSOLE_SCREEN_BUFFER_INFO() + windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(sbinfo)) + return sbinfo + def clear_line(self,param): + mode=param and int(param)or 0 + sbinfo=self.screen_buffer_info() + if mode==1: + line_start=COORD(0,sbinfo.CursorPosition.Y) + line_length=sbinfo.Size.X + elif mode==2: + line_start=COORD(sbinfo.CursorPosition.X,sbinfo.CursorPosition.Y) + line_length=sbinfo.Size.X-sbinfo.CursorPosition.X + else: + line_start=sbinfo.CursorPosition + line_length=sbinfo.Size.X-sbinfo.CursorPosition.X + chars_written=c_ulong() + windll.kernel32.FillConsoleOutputCharacterW(self.hconsole,c_wchar(' '),line_length,line_start,byref(chars_written)) + 
windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,line_length,line_start,byref(chars_written)) + def clear_screen(self,param): + mode=to_int(param,0) + sbinfo=self.screen_buffer_info() + if mode==1: + clear_start=COORD(0,0) + clear_length=sbinfo.CursorPosition.X*sbinfo.CursorPosition.Y + elif mode==2: + clear_start=COORD(0,0) + clear_length=sbinfo.Size.X*sbinfo.Size.Y + windll.kernel32.SetConsoleCursorPosition(self.hconsole,clear_start) + else: + clear_start=sbinfo.CursorPosition + clear_length=((sbinfo.Size.X-sbinfo.CursorPosition.X)+sbinfo.Size.X*(sbinfo.Size.Y-sbinfo.CursorPosition.Y)) + chars_written=c_ulong() + windll.kernel32.FillConsoleOutputCharacterW(self.hconsole,c_wchar(' '),clear_length,clear_start,byref(chars_written)) + windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,clear_length,clear_start,byref(chars_written)) + def push_cursor(self,param): + sbinfo=self.screen_buffer_info() + self.cursor_history.append(sbinfo.CursorPosition) + def pop_cursor(self,param): + if self.cursor_history: + old_pos=self.cursor_history.pop() + windll.kernel32.SetConsoleCursorPosition(self.hconsole,old_pos) + def set_cursor(self,param): + y,sep,x=param.partition(';') + x=to_int(x,1)-1 + y=to_int(y,1)-1 + sbinfo=self.screen_buffer_info() + new_pos=COORD(min(max(0,x),sbinfo.Size.X),min(max(0,y),sbinfo.Size.Y)) + windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos) + def set_column(self,param): + x=to_int(param,1)-1 + sbinfo=self.screen_buffer_info() + new_pos=COORD(min(max(0,x),sbinfo.Size.X),sbinfo.CursorPosition.Y) + windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos) + def move_cursor(self,x_offset=0,y_offset=0): + sbinfo=self.screen_buffer_info() + new_pos=COORD(min(max(0,sbinfo.CursorPosition.X+x_offset),sbinfo.Size.X),min(max(0,sbinfo.CursorPosition.Y+y_offset),sbinfo.Size.Y)) + windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos) + def move_up(self,param): + self.move_cursor(y_offset=-to_int(param,1)) + def move_down(self,param): + self.move_cursor(y_offset=to_int(param,1)) + def move_left(self,param): + self.move_cursor(x_offset=-to_int(param,1)) + def move_right(self,param): + self.move_cursor(x_offset=to_int(param,1)) + def next_line(self,param): + sbinfo=self.screen_buffer_info() + self.move_cursor(x_offset=-sbinfo.CursorPosition.X,y_offset=to_int(param,1)) + def prev_line(self,param): + sbinfo=self.screen_buffer_info() + self.move_cursor(x_offset=-sbinfo.CursorPosition.X,y_offset=-to_int(param,1)) + def rgb2bgr(self,c): + return((c&1)<<2)|(c&2)|((c&4)>>2) + def set_color(self,param): + cols=param.split(';') + sbinfo=CONSOLE_SCREEN_BUFFER_INFO() + windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(sbinfo)) + attr=sbinfo.Attributes + for c in cols: + if is_vista: + c=int(c) + else: + c=to_int(c,0) + if 29<c<38: + attr=(attr&0xfff0)|self.rgb2bgr(c-30) + elif 39<c<48: + attr=(attr&0xff0f)|(self.rgb2bgr(c-40)<<4) + elif c==0: + attr=self.orig_sbinfo.Attributes + elif c==1: + attr|=0x08 + elif c==4: + attr|=0x80 + elif c==7: + attr=(attr&0xff88)|((attr&0x70)>>4)|((attr&0x07)<<4) + windll.kernel32.SetConsoleTextAttribute(self.hconsole,attr) + def show_cursor(self,param): + csinfo.bVisible=1 + windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(csinfo)) + def hide_cursor(self,param): + csinfo.bVisible=0 + windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(csinfo)) + 
ansi_command_table={'A':move_up,'B':move_down,'C':move_right,'D':move_left,'E':next_line,'F':prev_line,'G':set_column,'H':set_cursor,'f':set_cursor,'J':clear_screen,'K':clear_line,'h':show_cursor,'l':hide_cursor,'m':set_color,'s':push_cursor,'u':pop_cursor,} + ansi_tokens=re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))') + def write(self,text): + try: + wlock.acquire() + for param,cmd,txt in self.ansi_tokens.findall(text): + if cmd: + cmd_func=self.ansi_command_table.get(cmd) + if cmd_func: + cmd_func(self,param) + else: + self.writeconsole(txt) + finally: + wlock.release() + def writeconsole(self,txt): + chars_written=c_int() + writeconsole=windll.kernel32.WriteConsoleA + if isinstance(txt,_type): + writeconsole=windll.kernel32.WriteConsoleW + TINY_STEP=3000 + for x in range(0,len(txt),TINY_STEP): + tiny=txt[x:x+TINY_STEP] + writeconsole(self.hconsole,tiny,len(tiny),byref(chars_written),None) + def flush(self): + pass + def isatty(self): + return True + sys.stderr=sys.stdout=AnsiTerm() + os.environ['TERM']='vt100' diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/__init__.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/__init__.py new file mode 100644 index 0000000..efeed79 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/__init__.py @@ -0,0 +1,4 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/__pycache__/__init__.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/__pycache__/__init__.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..6505739 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/__pycache__/__init__.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/__pycache__/autowaf.cpython-34.pyc b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/__pycache__/autowaf.cpython-34.pyc Binary files differnew file mode 100644 index 0000000..823fa79 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/__pycache__/autowaf.cpython-34.pyc diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/autowaf.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/autowaf.py new file mode 100644 index 0000000..d2fe731 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/autowaf.py @@ -0,0 +1,574 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import glob +import os +import subprocess +import sys +from waflib import Configure,Context,Logs,Node,Options,Task,Utils +from waflib.TaskGen import feature,before,after +global g_is_child +g_is_child=False +global g_step +g_step=0 +@feature('c','cxx') +@after('apply_incpaths') +def include_config_h(self): + self.env.append_value('INCPATHS',self.bld.bldnode.abspath()) +def set_options(opt,debug_by_default=False): + global g_step + if g_step>0: + return + dirs_options=opt.add_option_group('Installation directories','') + for k in('--prefix','--destdir'): + option=opt.parser.get_option(k) + if option: + opt.parser.remove_option(k) + dirs_options.add_option(option) + dirs_options.add_option('--bindir',type='string',help="Executable programs [Default: PREFIX/bin]") + dirs_options.add_option('--configdir',type='string',help="Configuration data [Default: PREFIX/etc]") + dirs_options.add_option('--datadir',type='string',help="Shared data [Default: PREFIX/share]") + dirs_options.add_option('--includedir',type='string',help="Header files [Default: PREFIX/include]") + dirs_options.add_option('--libdir',type='string',help="Libraries [Default: PREFIX/lib]") + dirs_options.add_option('--mandir',type='string',help="Manual pages [Default: DATADIR/man]") + dirs_options.add_option('--docdir',type='string',help="HTML documentation [Default: DATADIR/doc]") + if debug_by_default: + opt.add_option('--optimize',action='store_false',default=True,dest='debug',help="Build optimized binaries") + else: + opt.add_option('--debug',action='store_true',default=False,dest='debug',help="Build debuggable binaries") + opt.add_option('--pardebug',action='store_true',default=False,dest='pardebug',help="Build parallel-installable debuggable libraries with D suffix") + opt.add_option('--grind',action='store_true',default=False,dest='grind',help="Run tests in valgrind") + opt.add_option('--strict',action='store_true',default=False,dest='strict',help="Use strict compiler flags and show all warnings") + opt.add_option('--ultra-strict',action='store_true',default=False,dest='ultra_strict',help="Use even stricter compiler flags (likely to trigger many warnings in library headers)") + opt.add_option('--docs',action='store_true',default=False,dest='docs',help="Build documentation - requires doxygen") + opt.add_option('--lv2-user',action='store_true',default=False,dest='lv2_user',help="Install LV2 bundles to user location") + opt.add_option('--lv2-system',action='store_true',default=False,dest='lv2_system',help="Install LV2 bundles to system location") + dirs_options.add_option('--lv2dir',type='string',help="LV2 bundles [Default: LIBDIR/lv2]") + g_step=1 +def check_header(conf,lang,name,define='',mandatory=True): + includes='' + if sys.platform=="darwin": + includes='/opt/local/include' + if lang=='c': + check_func=conf.check_cc + elif lang=='cxx': + check_func=conf.check_cxx + else: + Logs.error("Unknown header language `%s'"%lang) + return + if define!='': + check_func(header_name=name,includes=includes,define_name=define,mandatory=mandatory) + else: + check_func(header_name=name,includes=includes,mandatory=mandatory) +def nameify(name): + return name.replace('/','_').replace('++','PP').replace('-','_').replace('.','_') +def define(conf,var_name,value): + conf.define(var_name,value) + conf.env[var_name]=value +def check_pkg(conf,name,**args): + if args['uselib_store'].lower()in conf.env['AUTOWAF_LOCAL_LIBS']: + return + class CheckType: + OPTIONAL=1 + 
MANDATORY=2 + var_name='CHECKED_'+nameify(args['uselib_store']) + check=not var_name in conf.env + mandatory=not'mandatory'in args or args['mandatory'] + if not check and'atleast_version'in args: + checked_version=conf.env['VERSION_'+name] + if checked_version and checked_version<args['atleast_version']: + check=True; + if not check and mandatory and conf.env[var_name]==CheckType.OPTIONAL: + check=True; + if check: + found=None + pkg_var_name='PKG_'+name.replace('-','_') + pkg_name=name + if conf.env.PARDEBUG: + args['mandatory']=False + found=conf.check_cfg(package=pkg_name+'D',args="--cflags --libs",**args) + if found: + pkg_name+='D' + if mandatory: + args['mandatory']=True + if not found: + found=conf.check_cfg(package=pkg_name,args="--cflags --libs",**args) + if found: + conf.env[pkg_var_name]=pkg_name + if'atleast_version'in args: + conf.env['VERSION_'+name]=args['atleast_version'] + if mandatory: + conf.env[var_name]=CheckType.MANDATORY + else: + conf.env[var_name]=CheckType.OPTIONAL +def normpath(path): + if sys.platform=='win32': + return os.path.normpath(path).replace('\\','/') + else: + return os.path.normpath(path) +def configure(conf): + global g_step + if g_step>1: + return + def append_cxx_flags(flags): + conf.env.append_value('CFLAGS',flags) + conf.env.append_value('CXXFLAGS',flags) + print('') + display_header('Global Configuration') + if Options.options.docs: + conf.load('doxygen') + conf.env['DOCS']=Options.options.docs + conf.env['DEBUG']=Options.options.debug or Options.options.pardebug + conf.env['PARDEBUG']=Options.options.pardebug + conf.env['PREFIX']=normpath(os.path.abspath(os.path.expanduser(conf.env['PREFIX']))) + def config_dir(var,opt,default): + if opt: + conf.env[var]=normpath(opt) + else: + conf.env[var]=normpath(default) + opts=Options.options + prefix=conf.env['PREFIX'] + config_dir('BINDIR',opts.bindir,os.path.join(prefix,'bin')) + config_dir('SYSCONFDIR',opts.configdir,os.path.join(prefix,'etc')) + config_dir('DATADIR',opts.datadir,os.path.join(prefix,'share')) + config_dir('INCLUDEDIR',opts.includedir,os.path.join(prefix,'include')) + config_dir('LIBDIR',opts.libdir,os.path.join(prefix,'lib')) + config_dir('MANDIR',opts.mandir,os.path.join(conf.env['DATADIR'],'man')) + config_dir('DOCDIR',opts.docdir,os.path.join(conf.env['DATADIR'],'doc')) + if Options.options.lv2dir: + conf.env['LV2DIR']=Options.options.lv2dir + elif Options.options.lv2_user: + if sys.platform=="darwin": + conf.env['LV2DIR']=os.path.join(os.getenv('HOME'),'Library/Audio/Plug-Ins/LV2') + elif sys.platform=="win32": + conf.env['LV2DIR']=os.path.join(os.getenv('APPDATA'),'LV2') + else: + conf.env['LV2DIR']=os.path.join(os.getenv('HOME'),'.lv2') + elif Options.options.lv2_system: + if sys.platform=="darwin": + conf.env['LV2DIR']='/Library/Audio/Plug-Ins/LV2' + elif sys.platform=="win32": + conf.env['LV2DIR']=os.path.join(os.getenv('COMMONPROGRAMFILES'),'LV2') + else: + conf.env['LV2DIR']=os.path.join(conf.env['LIBDIR'],'lv2') + else: + conf.env['LV2DIR']=os.path.join(conf.env['LIBDIR'],'lv2') + conf.env['LV2DIR']=normpath(conf.env['LV2DIR']) + if Options.options.docs: + doxygen=conf.find_program('doxygen') + if not doxygen: + conf.fatal("Doxygen is required to build with --docs") + dot=conf.find_program('dot') + if not dot: + conf.fatal("Graphviz (dot) is required to build with --docs") + if Options.options.debug: + if conf.env['MSVC_COMPILER']: + conf.env['CFLAGS']=['/Od','/Zi','/MTd'] + conf.env['CXXFLAGS']=['/Od','/Zi','/MTd'] + conf.env['LINKFLAGS']=['/DEBUG'] + else: + 
conf.env['CFLAGS']=['-O0','-g'] + conf.env['CXXFLAGS']=['-O0','-g'] + else: + if conf.env['MSVC_COMPILER']: + conf.env['CFLAGS']=['/MD'] + conf.env['CXXFLAGS']=['/MD'] + append_cxx_flags(['-DNDEBUG']) + if Options.options.ultra_strict: + Options.options.strict=True + conf.env.append_value('CFLAGS',['-Wredundant-decls','-Wstrict-prototypes','-Wmissing-prototypes','-Wcast-qual']) + conf.env.append_value('CXXFLAGS',['-Wcast-qual']) + if Options.options.strict: + conf.env.append_value('CFLAGS',['-pedantic','-Wshadow']) + conf.env.append_value('CXXFLAGS',['-ansi','-Wnon-virtual-dtor','-Woverloaded-virtual']) + append_cxx_flags(['-Wall','-Wcast-align','-Wextra','-Wmissing-declarations','-Wno-unused-parameter','-Wstrict-overflow','-Wundef','-Wwrite-strings','-fstrict-overflow']) + if not conf.check_cc(fragment=''' +#ifndef __clang__ +#error +#endif +int main() { return 0; }''',features='c',mandatory=False,execute=False,msg='Checking for clang'): + append_cxx_flags(['-Wlogical-op','-Wsuggest-attribute=noreturn','-Wunsafe-loop-optimizations']) + if not conf.env['MSVC_COMPILER']: + append_cxx_flags(['-fshow-column']) + conf.env.prepend_value('CFLAGS','-I'+os.path.abspath('.')) + conf.env.prepend_value('CXXFLAGS','-I'+os.path.abspath('.')) + display_msg(conf,"Install prefix",conf.env['PREFIX']) + display_msg(conf,"Debuggable build",str(conf.env['DEBUG'])) + display_msg(conf,"Build documentation",str(conf.env['DOCS'])) + print('') + g_step=2 +def set_c99_mode(conf): + if conf.env.MSVC_COMPILER: + conf.env.append_unique('CFLAGS',['-TP']) + else: + conf.env.append_unique('CFLAGS',['-std=c99']) +def set_local_lib(conf,name,has_objects): + var_name='HAVE_'+nameify(name.upper()) + define(conf,var_name,1) + if has_objects: + if type(conf.env['AUTOWAF_LOCAL_LIBS'])!=dict: + conf.env['AUTOWAF_LOCAL_LIBS']={} + conf.env['AUTOWAF_LOCAL_LIBS'][name.lower()]=True + else: + if type(conf.env['AUTOWAF_LOCAL_HEADERS'])!=dict: + conf.env['AUTOWAF_LOCAL_HEADERS']={} + conf.env['AUTOWAF_LOCAL_HEADERS'][name.lower()]=True +def append_property(obj,key,val): + if hasattr(obj,key): + setattr(obj,key,getattr(obj,key)+val) + else: + setattr(obj,key,val) +def use_lib(bld,obj,libs): + abssrcdir=os.path.abspath('.') + libs_list=libs.split() + for l in libs_list: + in_headers=l.lower()in bld.env['AUTOWAF_LOCAL_HEADERS'] + in_libs=l.lower()in bld.env['AUTOWAF_LOCAL_LIBS'] + if in_libs: + append_property(obj,'use',' lib%s '%l.lower()) + append_property(obj,'framework',bld.env['FRAMEWORK_'+l]) + if in_headers or in_libs: + inc_flag='-iquote '+os.path.join(abssrcdir,l.lower()) + for f in['CFLAGS','CXXFLAGS']: + if not inc_flag in bld.env[f]: + bld.env.prepend_value(f,inc_flag) + else: + append_property(obj,'uselib',' '+l) +@feature('c','cxx') +@before('apply_link') +def version_lib(self): + if sys.platform=='win32': + self.vnum=None + if self.env['PARDEBUG']: + applicable=['cshlib','cxxshlib','cstlib','cxxstlib'] + if[x for x in applicable if x in self.features]: + self.target=self.target+'D' +def set_lib_env(conf,name,version): + 'Set up environment for local library as if found via pkg-config.' 
+ NAME=name.upper() + major_ver=version.split('.')[0] + pkg_var_name='PKG_'+name.replace('-','_')+'_'+major_ver + lib_name='%s-%s'%(name,major_ver) + if conf.env.PARDEBUG: + lib_name+='D' + conf.env[pkg_var_name]=lib_name + conf.env['INCLUDES_'+NAME]=['${INCLUDEDIR}/%s-%s'%(name,major_ver)] + conf.env['LIBPATH_'+NAME]=[conf.env.LIBDIR] + conf.env['LIB_'+NAME]=[lib_name] +def display_header(title): + Logs.pprint('BOLD',title) +def display_msg(conf,msg,status=None,color=None): + color='CYAN' + if type(status)==bool and status or status=="True": + color='GREEN' + elif type(status)==bool and not status or status=="False": + color='YELLOW' + Logs.pprint('BOLD'," *",sep='') + Logs.pprint('NORMAL',"%s"%msg.ljust(conf.line_just-3),sep='') + Logs.pprint('BOLD',":",sep='') + Logs.pprint(color,status) +def link_flags(env,lib): + return' '.join(map(lambda x:env['LIB_ST']%x,env['LIB_'+lib])) +def compile_flags(env,lib): + return' '.join(map(lambda x:env['CPPPATH_ST']%x,env['INCLUDES_'+lib])) +def set_recursive(): + global g_is_child + g_is_child=True +def is_child(): + global g_is_child + return g_is_child +def build_pc(bld,name,version,version_suffix,libs,subst_dict={}): + '''Build a pkg-config file for a library. + name -- uppercase variable name (e.g. 'SOMENAME') + version -- version string (e.g. '1.2.3') + version_suffix -- name version suffix (e.g. '2') + libs -- string/list of dependencies (e.g. 'LIBFOO GLIB') + ''' + pkg_prefix=bld.env['PREFIX'] + if pkg_prefix[-1]=='/': + pkg_prefix=pkg_prefix[:-1] + target=name.lower() + if version_suffix!='': + target+='-'+version_suffix + if bld.env['PARDEBUG']: + target+='D' + target+='.pc' + libdir=bld.env['LIBDIR'] + if libdir.startswith(pkg_prefix): + libdir=libdir.replace(pkg_prefix,'${exec_prefix}') + includedir=bld.env['INCLUDEDIR'] + if includedir.startswith(pkg_prefix): + includedir=includedir.replace(pkg_prefix,'${prefix}') + obj=bld(features='subst',source='%s.pc.in'%name.lower(),target=target,install_path=os.path.join(bld.env['LIBDIR'],'pkgconfig'),exec_prefix='${prefix}',PREFIX=pkg_prefix,EXEC_PREFIX='${prefix}',LIBDIR=libdir,INCLUDEDIR=includedir) + if type(libs)!=list: + libs=libs.split() + subst_dict[name+'_VERSION']=version + subst_dict[name+'_MAJOR_VERSION']=version[0:version.find('.')] + for i in libs: + subst_dict[i+'_LIBS']=link_flags(bld.env,i) + lib_cflags=compile_flags(bld.env,i) + if lib_cflags=='': + lib_cflags=' ' + subst_dict[i+'_CFLAGS']=lib_cflags + obj.__dict__.update(subst_dict) +def build_dir(name,subdir): + if is_child(): + return os.path.join('build',name,subdir) + else: + return os.path.join('build',subdir) +def make_simple_dox(name): + name=name.lower() + NAME=name.upper() + try: + top=os.getcwd() + os.chdir(build_dir(name,'doc/html')) + page='group__%s.html'%name + if not os.path.exists(page): + return + for i in[['%s_API '%NAME,''],['%s_DEPRECATED '%NAME,''],['group__%s.html'%name,''],[' ',''],['<script.*><\/script>',''],['<hr\/><a name="details" id="details"><\/a><h2>.*<\/h2>',''],['<link href=\"tabs.css\" rel=\"stylesheet\" type=\"text\/css\"\/>',''],['<img class=\"footer\" src=\"doxygen.png\" alt=\"doxygen\"\/>','Doxygen']]: + os.system("sed -i 's/%s/%s/g' %s"%(i[0],i[1],page)) + os.rename('group__%s.html'%name,'index.html') + for i in(glob.glob('*.png')+glob.glob('*.html')+glob.glob('*.js')+glob.glob('*.css')): + if i!='index.html'and i!='style.css': + os.remove(i) + os.chdir(top) + os.chdir(build_dir(name,'doc/man/man3')) + for i in glob.glob('*.3'): + os.system("sed -i 's/%s_API //' %s"%(NAME,i)) + for i in 
glob.glob('_*'): + os.remove(i) + os.chdir(top) + except Exception as e: + Logs.error("Failed to fix up %s documentation: %s"%(name,e)) +def build_dox(bld,name,version,srcdir,blddir,outdir='',versioned=True): + if not bld.env['DOCS']: + return + if is_child(): + src_dir=os.path.join(srcdir,name.lower()) + doc_dir=os.path.join(blddir,name.lower(),'doc') + else: + src_dir=srcdir + doc_dir=os.path.join(blddir,'doc') + subst_tg=bld(features='subst',source='doc/reference.doxygen.in',target='doc/reference.doxygen',install_path='',name='doxyfile') + subst_dict={name+'_VERSION':version,name+'_SRCDIR':os.path.abspath(src_dir),name+'_DOC_DIR':os.path.abspath(doc_dir)} + subst_tg.__dict__.update(subst_dict) + subst_tg.post() + docs=bld(features='doxygen',doxyfile='doc/reference.doxygen') + docs.post() + outname=name.lower() + if versioned: + outname+='-%d'%int(version[0:version.find('.')]) + bld.install_files(os.path.join('${DOCDIR}',outname,outdir,'html'),bld.path.get_bld().ant_glob('doc/html/*')) + for i in range(1,8): + bld.install_files('${MANDIR}/man%d'%i,bld.path.get_bld().ant_glob('doc/man/man%d/*'%i,excl='**/_*')) +def build_version_files(header_path,source_path,domain,major,minor,micro): + header_path=os.path.abspath(header_path) + source_path=os.path.abspath(source_path) + text="int "+domain+"_major_version = "+str(major)+";\n" + text+="int "+domain+"_minor_version = "+str(minor)+";\n" + text+="int "+domain+"_micro_version = "+str(micro)+";\n" + try: + o=open(source_path,'w') + o.write(text) + o.close() + except IOError: + Logs.error('Failed to open %s for writing\n'%source_path) + sys.exit(-1) + text="#ifndef __"+domain+"_version_h__\n" + text+="#define __"+domain+"_version_h__\n" + text+="extern const char* "+domain+"_revision;\n" + text+="extern int "+domain+"_major_version;\n" + text+="extern int "+domain+"_minor_version;\n" + text+="extern int "+domain+"_micro_version;\n" + text+="#endif /* __"+domain+"_version_h__ */\n" + try: + o=open(header_path,'w') + o.write(text) + o.close() + except IOError: + Logs.warn('Failed to open %s for writing\n'%header_path) + sys.exit(-1) + return None +def build_i18n_pot(bld,srcdir,dir,name,sources,copyright_holder=None): + Logs.info('Generating pot file from %s'%name) + pot_file='%s.pot'%name + cmd=['xgettext','--keyword=_','--keyword=N_','--keyword=S_','--from-code=UTF-8','-o',pot_file] + if copyright_holder: + cmd+=['--copyright-holder="%s"'%copyright_holder] + cmd+=sources + Logs.info('Updating '+pot_file) + subprocess.call(cmd,cwd=os.path.join(srcdir,dir)) +def build_i18n_po(bld,srcdir,dir,name,sources,copyright_holder=None): + pwd=os.getcwd() + os.chdir(os.path.join(srcdir,dir)) + pot_file='%s.pot'%name + po_files=glob.glob('po/*.po') + for po_file in po_files: + cmd=['msgmerge','--update',po_file,pot_file] + Logs.info('Updating '+po_file) + subprocess.call(cmd) + os.chdir(pwd) +def build_i18n_mo(bld,srcdir,dir,name,sources,copyright_holder=None): + pwd=os.getcwd() + os.chdir(os.path.join(srcdir,dir)) + pot_file='%s.pot'%name + po_files=glob.glob('po/*.po') + for po_file in po_files: + mo_file=po_file.replace('.po','.mo') + cmd=['msgfmt','-c','-f','-o',mo_file,po_file] + Logs.info('Generating '+po_file) + subprocess.call(cmd) + os.chdir(pwd) +def build_i18n(bld,srcdir,dir,name,sources,copyright_holder=None): + build_i18n_pot(bld,srcdir,dir,name,sources,copyright_holder) + build_i18n_po(bld,srcdir,dir,name,sources,copyright_holder) + build_i18n_mo(bld,srcdir,dir,name,sources,copyright_holder) +def cd_to_build_dir(ctx,appname): + 
orig_dir=os.path.abspath(os.curdir) + top_level=(len(ctx.stack_path)>1) + if top_level: + os.chdir(os.path.join('build',appname)) + else: + os.chdir('build') + Logs.pprint('GREEN',"Waf: Entering directory `%s'"%os.path.abspath(os.getcwd())) +def cd_to_orig_dir(ctx,child): + if child: + os.chdir(os.path.join('..','..')) + else: + os.chdir('..') +def pre_test(ctx,appname,dirs=['src']): + diropts='' + for i in dirs: + diropts+=' -d '+i + cd_to_build_dir(ctx,appname) + clear_log=open('lcov-clear.log','w') + try: + try: + subprocess.call(('lcov %s -z'%diropts).split(),stdout=clear_log,stderr=clear_log) + except: + Logs.warn('Failed to run lcov, no coverage report will be generated') + finally: + clear_log.close() +def post_test(ctx,appname,dirs=['src'],remove=['*boost*','c++*']): + diropts='' + for i in dirs: + diropts+=' -d '+i + coverage_log=open('lcov-coverage.log','w') + coverage_lcov=open('coverage.lcov','w') + coverage_stripped_lcov=open('coverage-stripped.lcov','w') + try: + try: + base='.' + if g_is_child: + base='..' + subprocess.call(('lcov -c %s -b %s'%(diropts,base)).split(),stdout=coverage_lcov,stderr=coverage_log) + subprocess.call(['lcov','--remove','coverage.lcov']+remove,stdout=coverage_stripped_lcov,stderr=coverage_log) + if not os.path.isdir('coverage'): + os.makedirs('coverage') + subprocess.call('genhtml -o coverage coverage-stripped.lcov'.split(),stdout=coverage_log,stderr=coverage_log) + except: + Logs.warn('Failed to run lcov, no coverage report will be generated') + finally: + coverage_stripped_lcov.close() + coverage_lcov.close() + coverage_log.close() + print('') + Logs.pprint('GREEN',"Waf: Leaving directory `%s'"%os.path.abspath(os.getcwd())) + top_level=(len(ctx.stack_path)>1) + if top_level: + cd_to_orig_dir(ctx,top_level) + print('') + Logs.pprint('BOLD','Coverage:',sep='') + print('<file://%s>\n\n'%os.path.abspath('coverage/index.html')) +def run_test(ctx,appname,test,desired_status=0,dirs=['src'],name='',header=False): + s=test + if type(test)==type([]): + s=' '.join(i) + if header: + Logs.pprint('BOLD','** Test',sep='') + Logs.pprint('NORMAL','%s'%s) + cmd=test + if Options.options.grind: + cmd='valgrind '+test + if subprocess.call(cmd,shell=True)==desired_status: + Logs.pprint('GREEN','** Pass %s'%name) + return True + else: + Logs.pprint('RED','** FAIL %s'%name) + return False +def run_tests(ctx,appname,tests,desired_status=0,dirs=['src'],name='*',headers=False): + failures=0 + diropts='' + for i in dirs: + diropts+=' -d '+i + for i in tests: + if not run_test(ctx,appname,i,desired_status,dirs,i,headers): + failures+=1 + print('') + if failures==0: + Logs.pprint('GREEN','** Pass: All %s.%s tests passed'%(appname,name)) + else: + Logs.pprint('RED','** FAIL: %d %s.%s tests failed'%(failures,appname,name)) +def run_ldconfig(ctx): + if(ctx.cmd=='install'and not ctx.env['RAN_LDCONFIG']and ctx.env['LIBDIR']and not'DESTDIR'in os.environ and not Options.options.destdir): + try: + Logs.info("Waf: Running `/sbin/ldconfig %s'"%ctx.env['LIBDIR']) + subprocess.call(['/sbin/ldconfig',ctx.env['LIBDIR']]) + ctx.env['RAN_LDCONFIG']=True + except: + pass +def write_news(name,in_files,out_file,top_entries=None,extra_entries=None): + import rdflib + import textwrap + from time import strftime,strptime + doap=rdflib.Namespace('http://usefulinc.com/ns/doap#') + dcs=rdflib.Namespace('http://ontologi.es/doap-changeset#') + rdfs=rdflib.Namespace('http://www.w3.org/2000/01/rdf-schema#') + foaf=rdflib.Namespace('http://xmlns.com/foaf/0.1/') + 
rdf=rdflib.Namespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#') + m=rdflib.ConjunctiveGraph() + try: + for i in in_files: + m.parse(i,format='n3') + except: + Logs.warn('Error parsing data, unable to generate NEWS') + return + proj=m.value(None,rdf.type,doap.Project) + for f in m.triples([proj,rdfs.seeAlso,None]): + if f[2].endswith('.ttl'): + m.parse(f[2],format='n3') + entries={} + for r in m.triples([proj,doap.release,None]): + release=r[2] + revision=m.value(release,doap.revision,None) + date=m.value(release,doap.created,None) + blamee=m.value(release,dcs.blame,None) + changeset=m.value(release,dcs.changeset,None) + dist=m.value(release,doap['file-release'],None) + if revision and date and blamee and changeset: + entry='%s (%s) stable;\n'%(name,revision) + for i in m.triples([changeset,dcs.item,None]): + item=textwrap.wrap(m.value(i[2],rdfs.label,None),width=79) + entry+='\n * '+'\n '.join(item) + if dist and top_entries is not None: + if not str(dist)in top_entries: + top_entries[str(dist)]=[] + top_entries[str(dist)]+=['%s: %s'%(name,'\n '.join(item))] + if extra_entries: + for i in extra_entries[str(dist)]: + entry+='\n * '+i + entry+='\n\n --' + blamee_name=m.value(blamee,foaf.name,None) + blamee_mbox=m.value(blamee,foaf.mbox,None) + if blamee_name and blamee_mbox: + entry+=' %s <%s>'%(blamee_name,blamee_mbox.replace('mailto:','')) + entry+=' %s\n\n'%(strftime('%a, %d %b %Y %H:%M:%S +0000',strptime(date,'%Y-%m-%d'))) + entries[(date,revision)]=entry + else: + Logs.warn('Ignored incomplete %s release description'%name) + if len(entries)>0: + news=open(out_file,'w') + for e in sorted(entries.keys(),reverse=True): + news.write(entries[e]) + news.close() diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/doxygen.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/doxygen.py new file mode 100644 index 0000000..ea5da7c --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/doxygen.py @@ -0,0 +1,148 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +from fnmatch import fnmatchcase +import os,os.path,re,stat +from waflib import Task,Utils,Node,Logs +from waflib.TaskGen import feature +DOXY_STR='${DOXYGEN} - ' +DOXY_FMTS='html latex man rft xml'.split() +DOXY_FILE_PATTERNS='*.'+' *.'.join(''' +c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx hpp h++ idl odl cs php php3 +inc m mm py f90c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx +'''.split()) +re_rl=re.compile('\\\\\r*\n',re.MULTILINE) +re_nl=re.compile('\r*\n',re.M) +def parse_doxy(txt): + tbl={} + txt=re_rl.sub('',txt) + lines=re_nl.split(txt) + for x in lines: + x=x.strip() + if not x or x.startswith('#')or x.find('=')<0: + continue + if x.find('+=')>=0: + tmp=x.split('+=') + key=tmp[0].strip() + if key in tbl: + tbl[key]+=' '+'+='.join(tmp[1:]).strip() + else: + tbl[key]='+='.join(tmp[1:]).strip() + else: + tmp=x.split('=') + tbl[tmp[0].strip()]='='.join(tmp[1:]).strip() + return tbl +class doxygen(Task.Task): + vars=['DOXYGEN','DOXYFLAGS'] + color='BLUE' + def runnable_status(self): + ''' + self.pars are populated in runnable_status - because this function is being + run *before* both self.pars "consumers" - scan() and run() + + set output_dir (node) for the output + ''' + for x in self.run_after: + if not x.hasrun: + return Task.ASK_LATER + if not getattr(self,'pars',None): + txt=self.inputs[0].read() + self.pars=parse_doxy(txt) + if not self.pars.get('OUTPUT_DIRECTORY'): + self.pars['OUTPUT_DIRECTORY']=self.inputs[0].parent.get_bld().abspath() + if getattr(self.generator,'pars',None): + for k,v in self.generator.pars.iteritems(): + self.pars[k]=v + self.doxy_inputs=getattr(self,'doxy_inputs',[]) + if not self.pars.get('INPUT'): + self.doxy_inputs.append(self.inputs[0].parent) + else: + for i in self.pars.get('INPUT').split(): + if os.path.isabs(i): + node=self.generator.bld.root.find_node(i) + else: + node=self.generator.path.find_node(i) + if not node: + self.generator.bld.fatal('Could not find the doxygen input %r'%i) + self.doxy_inputs.append(node) + if not getattr(self,'output_dir',None): + bld=self.generator.bld + self.output_dir=bld.root.find_dir(self.pars['OUTPUT_DIRECTORY']) + if not self.output_dir: + self.output_dir=bld.path.find_or_declare(self.pars['OUTPUT_DIRECTORY']) + self.signature() + return Task.Task.runnable_status(self) + def scan(self): + if self.pars.get('RECURSIVE')=='YES': + Logs.warn("Doxygen RECURSIVE dependencies are not supported") + exclude_patterns=self.pars.get('EXCLUDE_PATTERNS','').split() + file_patterns=self.pars.get('FILE_PATTERNS','').split() + if not file_patterns: + file_patterns=DOXY_FILE_PATTERNS + nodes=[] + names=[] + for node in self.doxy_inputs: + if os.path.isdir(node.abspath()): + for m in node.ant_glob(file_patterns): + nodes.append(m) + else: + nodes.append(node) + return(nodes,names) + def run(self): + dct=self.pars.copy() + dct['INPUT']=' '.join([x.abspath()for x in self.doxy_inputs]) + code='\n'.join(['%s = %s'%(x,dct[x])for x in self.pars]) + code=code.encode() + cmd=Utils.subst_vars(DOXY_STR,self.env) + env=self.env.env or None + proc=Utils.subprocess.Popen(cmd,shell=True,stdin=Utils.subprocess.PIPE,env=env,cwd=self.generator.bld.path.get_bld().abspath()) + proc.communicate(code) + return proc.returncode + def post_run(self): + nodes=self.output_dir.ant_glob('**/*',quiet=True) + for x in nodes: + x.sig=Utils.h_file(x.abspath()) + self.outputs+=nodes + return Task.Task.post_run(self) +class tar(Task.Task): + run_str='${TAR} ${TAROPTS} ${TGT} 
${SRC}' + color='RED' + after=['doxygen'] + def runnable_status(self): + for x in getattr(self,'input_tasks',[]): + if not x.hasrun: + return Task.ASK_LATER + if not getattr(self,'tar_done_adding',None): + self.tar_done_adding=True + for x in getattr(self,'input_tasks',[]): + self.set_inputs(x.outputs) + if not self.inputs: + return Task.SKIP_ME + return Task.Task.runnable_status(self) + def __str__(self): + tgt_str=' '.join([a.nice_path(self.env)for a in self.outputs]) + return'%s: %s\n'%(self.__class__.__name__,tgt_str) +@feature('doxygen') +def process_doxy(self): + if not getattr(self,'doxyfile',None): + self.generator.bld.fatal('no doxyfile??') + node=self.doxyfile + if not isinstance(node,Node.Node): + node=self.path.find_resource(node) + if not node: + raise ValueError('doxygen file not found') + dsk=self.create_task('doxygen',node) + if getattr(self,'doxy_tar',None): + tsk=self.create_task('tar') + tsk.input_tasks=[dsk] + tsk.set_outputs(self.path.find_or_declare(self.doxy_tar)) + if self.doxy_tar.endswith('bz2'): + tsk.env['TAROPTS']=['cjf'] + elif self.doxy_tar.endswith('gz'): + tsk.env['TAROPTS']=['czf'] + else: + tsk.env['TAROPTS']=['cf'] +def configure(conf): + conf.find_program('doxygen',var='DOXYGEN') + conf.find_program('tar',var='TAR') diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/swig.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/swig.py new file mode 100644 index 0000000..4b6f50b --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/extras/swig.py @@ -0,0 +1,120 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import re +from waflib import Task,Utils,Logs +from waflib.TaskGen import extension,feature,after_method +from waflib.Configure import conf +from waflib.Tools import c_preproc +SWIG_EXTS=['.swig','.i'] +re_module=re.compile('%module(?:\s*\(.*\))?\s+(.+)',re.M) +re_1=re.compile(r'^%module.*?\s+([\w]+)\s*?$',re.M) +re_2=re.compile('%include "(.*)"',re.M) +re_3=re.compile('#include "(.*)"',re.M) +class swig(Task.Task): + color='BLUE' + run_str='${SWIG} ${SWIGFLAGS} ${SWIGPATH_ST:INCPATHS} ${SWIGDEF_ST:DEFINES} ${SRC}' + ext_out=['.h'] + vars=['SWIG_VERSION','SWIGDEPS'] + def runnable_status(self): + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + if not getattr(self,'init_outputs',None): + self.init_outputs=True + if not getattr(self,'module',None): + txt=self.inputs[0].read() + m=re_module.search(txt) + if not m: + raise ValueError("could not find the swig module name") + self.module=m.group(1) + swig_c(self) + for x in self.env['SWIGFLAGS']: + x=x[1:] + try: + fun=swig_langs[x] + except KeyError: + pass + else: + fun(self) + return super(swig,self).runnable_status() + def scan(self): + env=self.env + lst_src=[] + seen=[] + to_see=[self.inputs[0]] + while to_see: + node=to_see.pop(0) + if node in seen: + continue + seen.append(node) + lst_src.append(node) + code=node.read() + code=c_preproc.re_nl.sub('',code) + code=c_preproc.re_cpp.sub(c_preproc.repl,code) + names=re_2.findall(code)+re_3.findall(code) + for n in names: + for d in self.generator.includes_nodes+[node.parent]: + u=d.find_resource(n) + if u: + to_see.append(u) + break + else: + Logs.warn('could not find %r'%n) + return(lst_src,[]) +swig_langs={} +def swigf(fun): + swig_langs[fun.__name__.replace('swig_','')]=fun +swig.swigf=swigf +def swig_c(self): + ext='.swigwrap_%d.c'%self.generator.idx + 
flags=self.env['SWIGFLAGS'] + if'-c++'in flags: + ext+='xx' + out_node=self.inputs[0].parent.find_or_declare(self.module+ext) + if'-c++'in flags: + c_tsk=self.generator.cxx_hook(out_node) + else: + c_tsk=self.generator.c_hook(out_node) + c_tsk.set_run_after(self) + ge=self.generator.bld.producer + ge.outstanding.insert(0,c_tsk) + ge.total+=1 + try: + ltask=self.generator.link_task + except AttributeError: + pass + else: + ltask.set_run_after(c_tsk) + ltask.inputs.append(c_tsk.outputs[0]) + self.outputs.append(out_node) + if not'-o'in self.env['SWIGFLAGS']: + self.env.append_value('SWIGFLAGS',['-o',self.outputs[0].abspath()]) +@swigf +def swig_python(tsk): + tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module+'.py')) +@swigf +def swig_ocaml(tsk): + tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module+'.ml')) + tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module+'.mli')) +@extension(*SWIG_EXTS) +def i_file(self,node): + tsk=self.create_task('swig') + tsk.set_inputs(node) + tsk.module=getattr(self,'swig_module',None) + flags=self.to_list(getattr(self,'swig_flags',[])) + tsk.env.append_value('SWIGFLAGS',flags) +@conf +def check_swig_version(self): + reg_swig=re.compile(r'SWIG Version\s(.*)',re.M) + swig_out=self.cmd_and_log('%s -version'%self.env['SWIG']) + swigver=tuple([int(s)for s in reg_swig.findall(swig_out)[0].split('.')]) + self.env['SWIG_VERSION']=swigver + msg='Checking for swig version' + self.msg(msg,'.'.join(map(str,swigver))) + return swigver +def configure(conf): + swig=conf.find_program('swig',var='SWIG') + conf.env.SWIGPATH_ST='-I%s' + conf.env.SWIGDEF_ST='-D%s' diff --git a/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/fixpy2.py b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/fixpy2.py new file mode 100644 index 0000000..dd058e2 --- /dev/null +++ b/.waf3-1.7.16-0356ded4079f8d9c0828a065ed2fdab1/waflib/fixpy2.py @@ -0,0 +1,53 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file + +import os +all_modifs={} +def fixdir(dir): + global all_modifs + for k in all_modifs: + for v in all_modifs[k]: + modif(os.path.join(dir,'waflib'),k,v) +def modif(dir,name,fun): + if name=='*': + lst=[] + for y in'. Tools extras'.split(): + for x in os.listdir(os.path.join(dir,y)): + if x.endswith('.py'): + lst.append(y+os.sep+x) + for x in lst: + modif(dir,x,fun) + return + filename=os.path.join(dir,name) + f=open(filename,'r') + try: + txt=f.read() + finally: + f.close() + txt=fun(txt) + f=open(filename,'w') + try: + f.write(txt) + finally: + f.close() +def subst(*k): + def do_subst(fun): + global all_modifs + for x in k: + try: + all_modifs[x].append(fun) + except KeyError: + all_modifs[x]=[fun] + return fun + return do_subst +@subst('*') +def r1(code): + code=code.replace('as e:',',e:') + code=code.replace(".decode(sys.stdout.encoding or 'iso8859-1')",'') + code=code.replace('.encode()','') + return code +@subst('Runner.py') +def r4(code): + code=code.replace('next(self.biter)','self.biter.next()') + return code @@ -0,0 +1,59 @@ +Installation Instructions +========================= + +Basic Installation +------------------ + +Building this software requires only Python. 
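Configuration, building, and installation are all driven by the bundled ./waf
script, so no make or autotools setup is required.  If a particular compiler
or extra flags are needed, the standard environment variables described under
"Compiler Configuration" below can be set on the configure line, for example
(clang here is purely illustrative):

    CC=clang CXX=clang++ CFLAGS="-O2 -g" ./waf configure
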
To install with default options: + + ./waf configure + ./waf + ./waf install + +You may need to become root for the install stage, for example: + + sudo ./waf install + +Configuration Options +--------------------- + +All supported options can be viewed using the command: + + ./waf --help + +Most options only need to be passed during the configure stage, for example: + + ./waf configure --prefix=/usr + ./waf + ./waf install + +Compiler Configuration +---------------------- + +Several standard environment variables can be used to control how compilers are +invoked: + + * CC: Path to C compiler + * CFLAGS: C compiler options + * CXX: Path to C++ compiler + * CXXFLAGS: C++ compiler options + * CPPFLAGS: C preprocessor options + * LINKFLAGS: Linker options + +Installation Directories +------------------------ + +The --prefix option (or the PREFIX environment variable) can be used to change +the prefix which all files are installed under. There are also several options +allowing for more fine-tuned control, see the --help output for details. + +Packaging +--------- + +Everything can be installed to a specific root directory by passing a --destdir +option to the install stage (or setting the DESTDIR environment variable), +which adds a prefix to all install paths. For example: + + ./waf configure --prefix=/usr + ./waf + ./waf install --destdir=/tmp/package diff --git a/build/.conf_check_356fd619dbd68443f6a1cba88f513648/cache_run_c_code b/build/.conf_check_356fd619dbd68443f6a1cba88f513648/cache_run_c_code new file mode 100644 index 0000000..990e234 --- /dev/null +++ b/build/.conf_check_356fd619dbd68443f6a1cba88f513648/cache_run_c_code @@ -0,0 +1 @@ +cache_run_c_code = 0 diff --git a/build/.conf_check_356fd619dbd68443f6a1cba88f513648/test.cpp b/build/.conf_check_356fd619dbd68443f6a1cba88f513648/test.cpp new file mode 100644 index 0000000..8084024 --- /dev/null +++ b/build/.conf_check_356fd619dbd68443f6a1cba88f513648/test.cpp @@ -0,0 +1,8 @@ +#include <jack/jack.h> + +int main(int argc, char **argv) { + void *p; + (void)argc; (void)argv; + p=(void*)(jack_port_type_get_buffer_size); + return 0; +} diff --git a/build/.conf_check_356fd619dbd68443f6a1cba88f513648/testbuild/.wafpickle-linux-50594288-98 b/build/.conf_check_356fd619dbd68443f6a1cba88f513648/testbuild/.wafpickle-linux-50594288-98 Binary files differnew file mode 100644 index 0000000..43e26ee --- /dev/null +++ b/build/.conf_check_356fd619dbd68443f6a1cba88f513648/testbuild/.wafpickle-linux-50594288-98 diff --git a/build/.conf_check_356fd619dbd68443f6a1cba88f513648/testbuild/test.cpp.1.o b/build/.conf_check_356fd619dbd68443f6a1cba88f513648/testbuild/test.cpp.1.o Binary files differnew file mode 100644 index 0000000..4f72b38 --- /dev/null +++ b/build/.conf_check_356fd619dbd68443f6a1cba88f513648/testbuild/test.cpp.1.o diff --git a/build/.conf_check_356fd619dbd68443f6a1cba88f513648/testbuild/testprog b/build/.conf_check_356fd619dbd68443f6a1cba88f513648/testbuild/testprog Binary files differnew file mode 100755 index 0000000..5a39341 --- /dev/null +++ b/build/.conf_check_356fd619dbd68443f6a1cba88f513648/testbuild/testprog diff --git a/build/.conf_check_623450d91353b10e839bd52fd98cd354/cache_run_c_code b/build/.conf_check_623450d91353b10e839bd52fd98cd354/cache_run_c_code new file mode 100644 index 0000000..990e234 --- /dev/null +++ b/build/.conf_check_623450d91353b10e839bd52fd98cd354/cache_run_c_code @@ -0,0 +1 @@ +cache_run_c_code = 0 diff --git a/build/.conf_check_623450d91353b10e839bd52fd98cd354/test.cpp 
b/build/.conf_check_623450d91353b10e839bd52fd98cd354/test.cpp new file mode 100644 index 0000000..a231a1d --- /dev/null +++ b/build/.conf_check_623450d91353b10e839bd52fd98cd354/test.cpp @@ -0,0 +1,8 @@ +#include <jack/metadata.h> + +int main(int argc, char **argv) { + void *p; + (void)argc; (void)argv; + p=(void*)(jack_set_property); + return 0; +} diff --git a/build/.conf_check_623450d91353b10e839bd52fd98cd354/testbuild/.wafpickle-linux2-34015472-98 b/build/.conf_check_623450d91353b10e839bd52fd98cd354/testbuild/.wafpickle-linux2-34015472-98 Binary files differnew file mode 100644 index 0000000..877d270 --- /dev/null +++ b/build/.conf_check_623450d91353b10e839bd52fd98cd354/testbuild/.wafpickle-linux2-34015472-98 diff --git a/build/.conf_check_623450d91353b10e839bd52fd98cd354/testbuild/test.cpp.1.o b/build/.conf_check_623450d91353b10e839bd52fd98cd354/testbuild/test.cpp.1.o Binary files differnew file mode 100644 index 0000000..6510d5e --- /dev/null +++ b/build/.conf_check_623450d91353b10e839bd52fd98cd354/testbuild/test.cpp.1.o diff --git a/build/.conf_check_623450d91353b10e839bd52fd98cd354/testbuild/testprog b/build/.conf_check_623450d91353b10e839bd52fd98cd354/testbuild/testprog Binary files differnew file mode 100755 index 0000000..fcba65b --- /dev/null +++ b/build/.conf_check_623450d91353b10e839bd52fd98cd354/testbuild/testprog diff --git a/build/.conf_check_65ce00a7c239cff55a182e15e488e8c2/cache_run_c_code b/build/.conf_check_65ce00a7c239cff55a182e15e488e8c2/cache_run_c_code new file mode 100644 index 0000000..990e234 --- /dev/null +++ b/build/.conf_check_65ce00a7c239cff55a182e15e488e8c2/cache_run_c_code @@ -0,0 +1 @@ +cache_run_c_code = 0 diff --git a/build/.conf_check_65ce00a7c239cff55a182e15e488e8c2/test.cpp b/build/.conf_check_65ce00a7c239cff55a182e15e488e8c2/test.cpp new file mode 100644 index 0000000..a231a1d --- /dev/null +++ b/build/.conf_check_65ce00a7c239cff55a182e15e488e8c2/test.cpp @@ -0,0 +1,8 @@ +#include <jack/metadata.h> + +int main(int argc, char **argv) { + void *p; + (void)argc; (void)argv; + p=(void*)(jack_set_property); + return 0; +} diff --git a/build/.conf_check_65ce00a7c239cff55a182e15e488e8c2/testbuild/.wafpickle-linux-50594288-98 b/build/.conf_check_65ce00a7c239cff55a182e15e488e8c2/testbuild/.wafpickle-linux-50594288-98 Binary files differnew file mode 100644 index 0000000..65aba4d --- /dev/null +++ b/build/.conf_check_65ce00a7c239cff55a182e15e488e8c2/testbuild/.wafpickle-linux-50594288-98 diff --git a/build/.conf_check_65ce00a7c239cff55a182e15e488e8c2/testbuild/test.cpp.1.o b/build/.conf_check_65ce00a7c239cff55a182e15e488e8c2/testbuild/test.cpp.1.o Binary files differnew file mode 100644 index 0000000..aaeda15 --- /dev/null +++ b/build/.conf_check_65ce00a7c239cff55a182e15e488e8c2/testbuild/test.cpp.1.o diff --git a/build/.conf_check_65ce00a7c239cff55a182e15e488e8c2/testbuild/testprog b/build/.conf_check_65ce00a7c239cff55a182e15e488e8c2/testbuild/testprog Binary files differnew file mode 100755 index 0000000..805a832 --- /dev/null +++ b/build/.conf_check_65ce00a7c239cff55a182e15e488e8c2/testbuild/testprog diff --git a/build/.conf_check_af37a22df4c19cf0432d3404a6b219d1/cache_run_c_code b/build/.conf_check_af37a22df4c19cf0432d3404a6b219d1/cache_run_c_code new file mode 100644 index 0000000..990e234 --- /dev/null +++ b/build/.conf_check_af37a22df4c19cf0432d3404a6b219d1/cache_run_c_code @@ -0,0 +1 @@ +cache_run_c_code = 0 diff --git a/build/.conf_check_af37a22df4c19cf0432d3404a6b219d1/test.cpp b/build/.conf_check_af37a22df4c19cf0432d3404a6b219d1/test.cpp new file 
mode 100644 index 0000000..8084024 --- /dev/null +++ b/build/.conf_check_af37a22df4c19cf0432d3404a6b219d1/test.cpp @@ -0,0 +1,8 @@ +#include <jack/jack.h> + +int main(int argc, char **argv) { + void *p; + (void)argc; (void)argv; + p=(void*)(jack_port_type_get_buffer_size); + return 0; +} diff --git a/build/.conf_check_af37a22df4c19cf0432d3404a6b219d1/testbuild/.wafpickle-linux2-34015472-98 b/build/.conf_check_af37a22df4c19cf0432d3404a6b219d1/testbuild/.wafpickle-linux2-34015472-98 Binary files differnew file mode 100644 index 0000000..882cb23 --- /dev/null +++ b/build/.conf_check_af37a22df4c19cf0432d3404a6b219d1/testbuild/.wafpickle-linux2-34015472-98 diff --git a/build/.conf_check_af37a22df4c19cf0432d3404a6b219d1/testbuild/test.cpp.1.o b/build/.conf_check_af37a22df4c19cf0432d3404a6b219d1/testbuild/test.cpp.1.o Binary files differnew file mode 100644 index 0000000..5ad9471 --- /dev/null +++ b/build/.conf_check_af37a22df4c19cf0432d3404a6b219d1/testbuild/test.cpp.1.o diff --git a/build/.conf_check_af37a22df4c19cf0432d3404a6b219d1/testbuild/testprog b/build/.conf_check_af37a22df4c19cf0432d3404a6b219d1/testbuild/testprog Binary files differnew file mode 100755 index 0000000..35a8953 --- /dev/null +++ b/build/.conf_check_af37a22df4c19cf0432d3404a6b219d1/testbuild/testprog diff --git a/build/.lock-waf_linux2_build b/build/.lock-waf_linux2_build new file mode 100644 index 0000000..c089c9c --- /dev/null +++ b/build/.lock-waf_linux2_build @@ -0,0 +1,8 @@ +argv = ['./waf', 'configure'] +environ = {'WINDOWID': '20971526', 'INFINALITY_FT_FRINGE_FILTER_STRENGTH': '50', 'INFINALITY_FT_USE_VARIOUS_TWEAKS': 'true', 'LOGNAME': 'pepper', 'USER': 'pepper', 'PATH': '/home/pepper/.gem/ruby/2.1.0/bin:/home/pepper/.gem/ruby/2.1.0/bin:/usr/local/bin:/usr/bin:/bin:/usr/local/sbin:/usr/sbin:/sbin:/usr/firstscripts:/root/.gem/ruby/2.1.0/bin:/opt/android-sdk/build-tools/19.0.2/:/opt/android-sdk/platform-tools:/opt/android-sdk/tools:/opt/devkitpro/devkitPPC/bin:/opt/marytts/bin:/usr/bin/site_perl:/usr/bin/vendor_perl:/usr/bin/core_perl:/opt/android-sdk/tools/:/opt/android-sdk/platform-tools/:/opt/android-sdk/tools/:/opt/android-sdk/platform-tools/', 'XDG_VTNR': '1', 'HOME': '/home/pepper', 'paul': '6172753373', 'HG': '/usr/bin/hg', 'LANG': 'en_US.UTF-8', 'TERM': 'xterm-256color', 'SHELL': '/bin/bash', 'XAUTHORITY': '/home/pepper/.Xauthority', 'SHLVL': '3', 'DBUS_SESSION_BUS_ADDRESS': 'unix:abstract=/tmp/dbus-AtdPifsQ9w,guid=1c8a4fb2d96ee041948d9c9554c5d823', 'WINDOWPATH': '1', 'EDITOR': 'vim', 'JAVA_HOME': '/usr/lib/jvm/java-7-openjdk', 'ANDROID_SWT': '/usr/share/java', 'dad': '6174598251', 'XDG_RUNTIME_DIR': '/run/user/1002', 'VTE_VERSION': '3603', 'INFINALITY_FT_FILTER_PARAMS': '06 22 36 22 06', 'ANDROID_HOME': '/opt/android-sdk', 'XDG_SESSION_ID': 'c1', 'DEVKITPRO': '/opt/devkitpro', '_': './waf', 'MOZ_PLUGIN_PATH': '/usr/lib/mozilla/plugins', 'GREP_OPTIONS': '--color=auto', 'DISPLAY': ':0', 'GTK_MODULES': 'canberra-gtk-module', 'INFINALITY_FT_CHROMEOS_STYLE_SHARPENING_STRENGTH': '20', 'OLDPWD': '/home/pepper/LV2_render/src', 'ANT_HOME': '/usr/share/apache-ant', 'DEVKITPPC': '/opt/devkitpro/devkitPPC', 'PWD': '/home/pepper/LV2_render', 'calla': '3109381479', 'COLORTERM': 'gnome-terminal', 'MAIL': '/var/spool/mail/pepper', 'XDG_SEAT': 'seat0'} +files = ['/home/pepper/LV2_render/wscript'] +hash = '~\x8a\x88\x18\xd5\xff\\\xc7\x1e+Y\xf0\xa2hK\xb4' +options = {'docdir': None, 'ultra_strict': False, 'force': False, 'verbose': 0, 'pardebug': False, 'destdir': '', 'no_jack_session': False, 'zones': '', 'prefix': 
'/usr/local/', 'download': False, 'grind': False, 'targets': '', 'configdir': None, 'mandir': None, 'nocache': False, 'progress_bar': 0, 'top': '', 'libdir': None, 'strict': False, 'datadir': None, 'lv2_user': False, 'out': '', 'check_cxx_compiler': 'g++ icpc', 'bindir': None, 'files': '', 'lv2_system': False, 'jobs': 4, 'docs': False, 'distcheck_args': None, 'no_qt': False, 'lv2dir': None, 'keep': 0, 'includedir': None, 'debug': False, 'check_c_compiler': 'gcc icc'} +out_dir = '/home/pepper/LV2_render/build' +run_dir = '/home/pepper/LV2_render' +top_dir = '/home/pepper/LV2_render' diff --git a/build/.lock-waf_linux_build b/build/.lock-waf_linux_build new file mode 100644 index 0000000..d07cd62 --- /dev/null +++ b/build/.lock-waf_linux_build @@ -0,0 +1,8 @@ +argv = ['./waf', 'configure'] +environ = {'TERM': 'xterm-256color', 'HOME': '/home/pepper', 'HISTFILESIZE': '100000', 'SSH_CONNECTION': '104.34.76.116 45733 199.180.249.102 22', 'PATH': '/usr/local/bin:/usr/bin:/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/marytts/bin:/usr/bin/site_perl:/usr/bin/vendor_perl:/usr/bin/core_perl', 'XDG_RUNTIME_DIR': '/run/user/1000', 'SSH_CLIENT': '104.34.76.116 45733 22', 'SHELL': '/bin/bash', 'DINO': '/var/www/dino.fm', 'LOGNAME': 'pepper', 'LS_COLORS': 'rs=0:di=01;34:ln=01;36:mh=00:pi=40;33:so=01;35:do=01;35:bd=40;33;01:cd=40;33;01:or=40;31;01:su=37;41:sg=30;43:ca=30;41:tw=30;42:ow=34;42:st=37;44:ex=01;32:*.tar=01;31:*.tgz=01;31:*.arc=01;31:*.arj=01;31:*.taz=01;31:*.lha=01;31:*.lz4=01;31:*.lzh=01;31:*.lzma=01;31:*.tlz=01;31:*.txz=01;31:*.tzo=01;31:*.t7z=01;31:*.zip=01;31:*.z=01;31:*.Z=01;31:*.dz=01;31:*.gz=01;31:*.lrz=01;31:*.lz=01;31:*.lzo=01;31:*.xz=01;31:*.bz2=01;31:*.bz=01;31:*.tbz=01;31:*.tbz2=01;31:*.tz=01;31:*.deb=01;31:*.rpm=01;31:*.jar=01;31:*.war=01;31:*.ear=01;31:*.sar=01;31:*.rar=01;31:*.alz=01;31:*.ace=01;31:*.zoo=01;31:*.cpio=01;31:*.7z=01;31:*.rz=01;31:*.cab=01;31:*.jpg=01;35:*.jpeg=01;35:*.gif=01;35:*.bmp=01;35:*.pbm=01;35:*.pgm=01;35:*.ppm=01;35:*.tga=01;35:*.xbm=01;35:*.xpm=01;35:*.tif=01;35:*.tiff=01;35:*.png=01;35:*.svg=01;35:*.svgz=01;35:*.mng=01;35:*.pcx=01;35:*.mov=01;35:*.mpg=01;35:*.mpeg=01;35:*.m2v=01;35:*.mkv=01;35:*.webm=01;35:*.ogm=01;35:*.mp4=01;35:*.m4v=01;35:*.mp4v=01;35:*.vob=01;35:*.qt=01;35:*.nuv=01;35:*.wmv=01;35:*.asf=01;35:*.rm=01;35:*.rmvb=01;35:*.flc=01;35:*.avi=01;35:*.fli=01;35:*.flv=01;35:*.gl=01;35:*.dl=01;35:*.xcf=01;35:*.xwd=01;35:*.yuv=01;35:*.cgm=01;35:*.emf=01;35:*.axv=01;35:*.anx=01;35:*.ogv=01;35:*.ogx=01;35:*.aac=00;36:*.au=00;36:*.flac=00;36:*.mid=00;36:*.midi=00;36:*.mka=00;36:*.mp3=00;36:*.mpc=00;36:*.ogg=00;36:*.ra=00;36:*.wav=00;36:*.axa=00;36:*.oga=00;36:*.spx=00;36:*.xspf=00;36:', 'NODE_PATH': '/usr/lib/node_modules', 'PAGER': '/usr/bin/less', 'HISTSIZE': '100000', 'PWD': '/home/pepper/LV2/jalv-1.4.6', 'USER': 'pepper', 'EDITOR': '/usr/bin/vim', '_': './waf', 'SSH_TTY': '/dev/pts/5', 'MAIL': '/var/spool/mail/pepper', 'LANG': 'en_US', 'OLDPWD': '/home/pepper/LV2/jalv-1.4.6/src', 'SHLVL': '1', 'XDG_SESSION_ID': 'c1180', 'HISTCONTROL': 'ignoredups:erasedups', 'JAVA_HOME': '/usr/lib/jvm/java-7-openjdk'} +files = ['/home/pepper/LV2/jalv-1.4.6/wscript'] +hash = b'\xd5\xec~D\xdcJ\x8e\xc0qt\xde\x80$(^B' +options = {'keep': 0, 'force': False, 'check_cxx_compiler': 'g++ icpc', 'destdir': '', 'prefix': '/usr/local/', 'configdir': None, 'check_c_compiler': 'gcc icc', 'download': False, 'top': '', 'targets': '', 'verbose': 0, 'ultra_strict': False, 'jobs': 4, 'nocache': False, 'libdir': None, 'docs': False, 'datadir': None, 'distcheck_args': None, 'strict': 
False, 'no_qt': False, 'lv2_user': False, 'docdir': None, 'zones': '', 'grind': False, 'mandir': None, 'pardebug': False, 'no_jack_session': False, 'includedir': None, 'bindir': None, 'debug': False, 'lv2dir': None, 'out': '', 'files': '', 'lv2_system': False, 'progress_bar': 0} +out_dir = '/home/pepper/LV2/jalv-1.4.6/build' +run_dir = '/home/pepper/LV2/jalv-1.4.6' +top_dir = '/home/pepper/LV2/jalv-1.4.6' diff --git a/build/.wafpickle-linux2-34015472-98 b/build/.wafpickle-linux2-34015472-98 Binary files differnew file mode 100644 index 0000000..24d874e --- /dev/null +++ b/build/.wafpickle-linux2-34015472-98 diff --git a/build/LV2-render b/build/LV2-render Binary files differnew file mode 100755 index 0000000..769457e --- /dev/null +++ b/build/LV2-render diff --git a/build/c4che/_cache.py b/build/c4che/_cache.py new file mode 100644 index 0000000..1f95c39 --- /dev/null +++ b/build/c4che/_cache.py @@ -0,0 +1,101 @@ +AR = '/usr/bin/ar' +ARFLAGS = 'rcs' +BINDIR = '/usr/local/bin' +CC = ['/usr/bin/gcc'] +CCLNK_SRC_F = [] +CCLNK_TGT_F = ['-o'] +CC_NAME = 'gcc' +CC_SRC_F = [] +CC_TGT_F = ['-c', '-o'] +CC_VERSION = ('4', '9', '2') +CFLAGS = ['-I/home/pepper/LV2_render', '-DNDEBUG', '-fshow-column', '-std=c99'] +CFLAGS_MACBUNDLE = ['-fPIC'] +CFLAGS_cshlib = ['-fPIC'] +CHECKED_JACK = 2 +CHECKED_LILV = 2 +CHECKED_LV2 = 2 +CHECKED_SERD = 2 +CHECKED_SORD = 2 +CHECKED_SRATOM = 2 +CHECKED_SUIL = 2 +COMPILER_CC = 'gcc' +COMPILER_CXX = 'g++' +CPPPATH_ST = '-I%s' +CXX = ['/usr/bin/g++'] +CXXFLAGS = ['-I/home/pepper/LV2_render', '-DNDEBUG', '-fshow-column'] +CXXFLAGS_MACBUNDLE = ['-fPIC'] +CXXFLAGS_cxxshlib = ['-fPIC'] +CXXLNK_SRC_F = [] +CXXLNK_TGT_F = ['-o'] +CXX_NAME = 'gcc' +CXX_SRC_F = [] +CXX_TGT_F = ['-c', '-o'] +DATADIR = '/usr/local/share' +DEBUG = False +DEFINES = ['HAVE_LV2=1', 'HAVE_LILV=1', 'HAVE_SERD=1', 'HAVE_SORD=1', 'HAVE_SUIL=1', 'HAVE_SRATOM=1', 'HAVE_JACK=1', 'HAVE_JACK_PORT_TYPE_GET_BUFFER_SIZE=1', 'HAVE_JACK_METADATA=1', 'JALV_JACK_SESSION=1', 'JALV_VERSION="1.4.6"'] +DEFINES_ST = '-D%s' +DEST_BINFMT = 'elf' +DEST_CPU = 'x86_64' +DEST_OS = 'linux' +DOCDIR = '/usr/local/share/doc' +DOCS = False +INCLUDEDIR = '/usr/local/include' +INCLUDES_LILV = ['/usr/include/lilv-0', '/usr/include/sratom-0', '/usr/include/sord-0', '/usr/include/serd-0'] +INCLUDES_SERD = ['/usr/include/serd-0'] +INCLUDES_SORD = ['/usr/include/sord-0', '/usr/include/serd-0'] +INCLUDES_SRATOM = ['/usr/include/sratom-0', '/usr/include/sord-0', '/usr/include/serd-0'] +INCLUDES_SUIL = ['/usr/include/suil-0'] +JALV_JACK_SESSION = 1 +JALV_VERSION = '1.4.6' +LIB = ['m', 'sndfile'] +LIBDIR = '/usr/local/lib' +LIBPATH_ST = '-L%s' +LIB_JACK = ['jack'] +LIB_LILV = ['lilv-0', 'dl', 'sratom-0', 'sord-0', 'serd-0'] +LIB_SERD = ['serd-0'] +LIB_SORD = ['sord-0', 'serd-0'] +LIB_SRATOM = ['sratom-0', 'sord-0', 'serd-0'] +LIB_ST = '-l%s' +LIB_SUIL = ['suil-0'] +LINKFLAGS_MACBUNDLE = ['-bundle', '-undefined', 'dynamic_lookup'] +LINKFLAGS_cshlib = ['-shared'] +LINKFLAGS_cstlib = ['-Wl,-Bstatic'] +LINKFLAGS_cxxshlib = ['-shared'] +LINKFLAGS_cxxstlib = ['-Wl,-Bstatic'] +LINK_CC = ['/usr/bin/gcc'] +LINK_CXX = ['/usr/bin/g++'] +LV2DIR = '/usr/local/lib/lv2' +MANDIR = '/usr/local/share/man' +PARDEBUG = False +PKGCONFIG = '/usr/bin/pkg-config' +PKG_jack = 'jack' +PKG_lilv_0 = 'lilv-0' +PKG_lv2 = 'lv2' +PKG_serd_0 = 'serd-0' +PKG_sord_0 = 'sord-0' +PKG_sratom_0 = 'sratom-0' +PKG_suil_0 = 'suil-0' +PREFIX = '/usr/local' +RPATH_ST = '-Wl,-rpath,%s' +SHLIB_MARKER = '-Wl,-Bdynamic' +SONAME_ST = '-Wl,-h,%s' +STLIBPATH_ST = '-L%s' +STLIB_MARKER = 
'-Wl,-Bstatic' +STLIB_ST = '-l%s' +SYSCONFDIR = '/usr/local/etc' +VERSION_jack = '0.120.0' +VERSION_lilv-0 = '0.19.2' +VERSION_lv2 = '1.8.1' +VERSION_serd-0 = '0.14.0' +VERSION_sord-0 = '0.12.0' +VERSION_sratom-0 = '0.4.0' +VERSION_suil-0 = '0.6.0' +cfg_files = ['/home/pepper/LV2_render/build/jalv_config.h'] +cprogram_PATTERN = '%s' +cshlib_PATTERN = 'lib%s.so' +cstlib_PATTERN = 'lib%s.a' +cxxprogram_PATTERN = '%s' +cxxshlib_PATTERN = 'lib%s.so' +cxxstlib_PATTERN = 'lib%s.a' +define_key = ['HAVE_LV2', 'HAVE_LILV', 'HAVE_SERD', 'HAVE_SORD', 'HAVE_SUIL', 'HAVE_SRATOM', 'HAVE_JACK', 'HAVE_JACK_PORT_TYPE_GET_BUFFER_SIZE', 'HAVE_JACK_METADATA', 'JALV_JACK_SESSION', 'JALV_VERSION'] +macbundle_PATTERN = '%s.bundle' diff --git a/build/c4che/build.config.py b/build/c4che/build.config.py new file mode 100644 index 0000000..7424958 --- /dev/null +++ b/build/c4che/build.config.py @@ -0,0 +1,2 @@ +version = 0x1071000 +tools = [{'tool': 'ar', 'tooldir': None, 'funs': None}, {'tool': 'c', 'tooldir': None, 'funs': None}, {'tool': 'gcc', 'tooldir': None, 'funs': None}, {'tool': 'compiler_c', 'tooldir': None, 'funs': None}, {'tool': 'cxx', 'tooldir': None, 'funs': None}, {'tool': 'g++', 'tooldir': None, 'funs': None}, {'tool': 'compiler_cxx', 'tooldir': None, 'funs': None}] diff --git a/build/config.log b/build/config.log new file mode 100644 index 0000000..53933a4 --- /dev/null +++ b/build/config.log @@ -0,0 +1,113 @@ +# project LV2-render (1.4.6) configured on Sun Jan 25 23:28:57 2015 by +# waf 1.7.16 (abi 98, python 20708f0 on linux2) +# using ./waf configure +# +---------------------------------------- +Setting top to +/home/pepper/LV2_render +---------------------------------------- +Setting out to +/home/pepper/LV2_render/build +---------------------------------------------------- +Checking for 'gcc' (c compiler) +find program=['gcc', 'cc'] paths=['/home/pepper/.gem/ruby/2.1.0/bin', '/home/pepper/.gem/ruby/2.1.0/bin', '/usr/local/bin', '/usr/bin', '/bin', '/usr/local/sbin', '/usr/sbin', '/sbin', '/usr/firstscripts', '/root/.gem/ruby/2.1.0/bin', '/opt/android-sdk/build-tools/19.0.2/', '/opt/android-sdk/platform-tools', '/opt/android-sdk/tools', '/opt/devkitpro/devkitPPC/bin', '/opt/marytts/bin', '/usr/bin/site_perl', '/usr/bin/vendor_perl', '/usr/bin/core_perl', '/opt/android-sdk/tools/', '/opt/android-sdk/platform-tools/', '/opt/android-sdk/tools/', '/opt/android-sdk/platform-tools/'] var='CC' -> '/usr/bin/gcc' +find program=['ar'] paths=['/home/pepper/.gem/ruby/2.1.0/bin', '/home/pepper/.gem/ruby/2.1.0/bin', '/usr/local/bin', '/usr/bin', '/bin', '/usr/local/sbin', '/usr/sbin', '/sbin', '/usr/firstscripts', '/root/.gem/ruby/2.1.0/bin', '/opt/android-sdk/build-tools/19.0.2/', '/opt/android-sdk/platform-tools', '/opt/android-sdk/tools', '/opt/devkitpro/devkitPPC/bin', '/opt/marytts/bin', '/usr/bin/site_perl', '/usr/bin/vendor_perl', '/usr/bin/core_perl', '/opt/android-sdk/tools/', '/opt/android-sdk/platform-tools/', '/opt/android-sdk/tools/', '/opt/android-sdk/platform-tools/'] var='AR' -> '/usr/bin/ar' +/usr/bin/gcc +---------------------------------------------------- +Checking for 'g++' (c++ compiler) +find program=['g++', 'c++'] paths=['/home/pepper/.gem/ruby/2.1.0/bin', '/home/pepper/.gem/ruby/2.1.0/bin', '/usr/local/bin', '/usr/bin', '/bin', '/usr/local/sbin', '/usr/sbin', '/sbin', '/usr/firstscripts', '/root/.gem/ruby/2.1.0/bin', '/opt/android-sdk/build-tools/19.0.2/', '/opt/android-sdk/platform-tools', '/opt/android-sdk/tools', '/opt/devkitpro/devkitPPC/bin', '/opt/marytts/bin', 
'/usr/bin/site_perl', '/usr/bin/vendor_perl', '/usr/bin/core_perl', '/opt/android-sdk/tools/', '/opt/android-sdk/platform-tools/', '/opt/android-sdk/tools/', '/opt/android-sdk/platform-tools/'] var='CXX' -> '/usr/bin/g++' +(tool ar is already loaded, skipping) +/usr/bin/g++ +---------------------------------------------------- +Checking for program pkg-config +/usr/bin/pkg-config +find program=['pkg-config'] paths=['/home/pepper/.gem/ruby/2.1.0/bin', '/home/pepper/.gem/ruby/2.1.0/bin', '/usr/local/bin', '/usr/bin', '/bin', '/usr/local/sbin', '/usr/sbin', '/sbin', '/usr/firstscripts', '/root/.gem/ruby/2.1.0/bin', '/opt/android-sdk/build-tools/19.0.2/', '/opt/android-sdk/platform-tools', '/opt/android-sdk/tools', '/opt/devkitpro/devkitPPC/bin', '/opt/marytts/bin', '/usr/bin/site_perl', '/usr/bin/vendor_perl', '/usr/bin/core_perl', '/opt/android-sdk/tools/', '/opt/android-sdk/platform-tools/', '/opt/android-sdk/tools/', '/opt/android-sdk/platform-tools/'] var='PKGCONFIG' -> '/usr/bin/pkg-config' +---------------------------------------------------- +Checking for 'lv2' >= 1.8.1 +['/usr/bin/pkg-config', '--atleast-version=1.8.1', 'lv2'] +['/usr/bin/pkg-config', '--cflags', '--libs', 'lv2'] +out: + +yes +---------------------------------------------------- +Checking for 'lilv-0' >= 0.19.2 +['/usr/bin/pkg-config', '--atleast-version=0.19.2', 'lilv-0'] +['/usr/bin/pkg-config', '--cflags', '--libs', 'lilv-0'] +out: -I/usr/include/lilv-0 -I/usr/include/sratom-0 -I/usr/include/sord-0 -I/usr/include/serd-0 -llilv-0 -ldl -lsratom-0 -lsord-0 -lserd-0 + +yes +---------------------------------------------------- +Checking for 'serd-0' >= 0.14.0 +['/usr/bin/pkg-config', '--atleast-version=0.14.0', 'serd-0'] +['/usr/bin/pkg-config', '--cflags', '--libs', 'serd-0'] +out: -I/usr/include/serd-0 -lserd-0 + +yes +---------------------------------------------------- +Checking for 'sord-0' >= 0.12.0 +['/usr/bin/pkg-config', '--atleast-version=0.12.0', 'sord-0'] +['/usr/bin/pkg-config', '--cflags', '--libs', 'sord-0'] +out: -I/usr/include/sord-0 -I/usr/include/serd-0 -lsord-0 -lserd-0 + +yes +---------------------------------------------------- +Checking for 'suil-0' >= 0.6.0 +['/usr/bin/pkg-config', '--atleast-version=0.6.0', 'suil-0'] +['/usr/bin/pkg-config', '--cflags', '--libs', 'suil-0'] +out: -I/usr/include/suil-0 -lsuil-0 + +yes +---------------------------------------------------- +Checking for 'sratom-0' >= 0.4.0 +['/usr/bin/pkg-config', '--atleast-version=0.4.0', 'sratom-0'] +['/usr/bin/pkg-config', '--cflags', '--libs', 'sratom-0'] +out: -I/usr/include/sratom-0 -I/usr/include/sord-0 -I/usr/include/serd-0 -lsratom-0 -lsord-0 -lserd-0 + +yes +---------------------------------------------------- +Checking for 'jack' >= 0.120.0 +['/usr/bin/pkg-config', '--atleast-version=0.120.0', 'jack'] +['/usr/bin/pkg-config', '--cflags', '--libs', 'jack'] +out: -ljack + +yes +---------------------------------------------------- +Checking for function jack_port_type_get_buffer_size +==> +#include <jack/jack.h> + +int main(int argc, char **argv) { + void *p; + (void)argc; (void)argv; + p=(void*)(jack_port_type_get_buffer_size); + return 0; +} + +<== +[1/2] [32mcxx: build/.conf_check_af37a22df4c19cf0432d3404a6b219d1/test.cpp -> build/.conf_check_af37a22df4c19cf0432d3404a6b219d1/testbuild/test.cpp.1.o +[0m +['/usr/bin/g++', '-I/home/pepper/LV2_render', '-DNDEBUG', '-fshow-column', '-I/home/pepper/LV2_render/build/.conf_check_af37a22df4c19cf0432d3404a6b219d1/testbuild', '-DHAVE_LV2=1', '-DHAVE_LILV=1', '-DHAVE_SERD=1', 
'-DHAVE_SORD=1', '-DHAVE_SUIL=1', '-DHAVE_SRATOM=1', '-DHAVE_JACK=1', '../test.cpp', '-c', '-o', 'test.cpp.1.o'] +[2/2] [33mcxxprogram: build/.conf_check_af37a22df4c19cf0432d3404a6b219d1/testbuild/test.cpp.1.o -> build/.conf_check_af37a22df4c19cf0432d3404a6b219d1/testbuild/testprog +[0m +['/usr/bin/g++', 'test.cpp.1.o', '-o', '/home/pepper/LV2_render/build/.conf_check_af37a22df4c19cf0432d3404a6b219d1/testbuild/testprog', '-Wl,-Bstatic', '-Wl,-Bdynamic', '-lm', '-lsndfile', '-ljack'] +yes +---------------------------------------------------- +Checking for function jack_set_property +==> +#include <jack/metadata.h> + +int main(int argc, char **argv) { + void *p; + (void)argc; (void)argv; + p=(void*)(jack_set_property); + return 0; +} + +<== +[1/2] [32mcxx: build/.conf_check_623450d91353b10e839bd52fd98cd354/test.cpp -> build/.conf_check_623450d91353b10e839bd52fd98cd354/testbuild/test.cpp.1.o +[0m +['/usr/bin/g++', '-I/home/pepper/LV2_render', '-DNDEBUG', '-fshow-column', '-I/home/pepper/LV2_render/build/.conf_check_623450d91353b10e839bd52fd98cd354/testbuild', '-DHAVE_LV2=1', '-DHAVE_LILV=1', '-DHAVE_SERD=1', '-DHAVE_SORD=1', '-DHAVE_SUIL=1', '-DHAVE_SRATOM=1', '-DHAVE_JACK=1', '-DHAVE_JACK_PORT_TYPE_GET_BUFFER_SIZE=1', '../test.cpp', '-c', '-o', 'test.cpp.1.o'] +[2/2] [33mcxxprogram: build/.conf_check_623450d91353b10e839bd52fd98cd354/testbuild/test.cpp.1.o -> build/.conf_check_623450d91353b10e839bd52fd98cd354/testbuild/testprog +[0m +['/usr/bin/g++', 'test.cpp.1.o', '-o', '/home/pepper/LV2_render/build/.conf_check_623450d91353b10e839bd52fd98cd354/testbuild/testprog', '-Wl,-Bstatic', '-Wl,-Bdynamic', '-lm', '-lsndfile', '-ljack'] +yes diff --git a/build/jalv_config.h b/build/jalv_config.h new file mode 100644 index 0000000..d9ddc94 --- /dev/null +++ b/build/jalv_config.h @@ -0,0 +1,18 @@ +/* WARNING! All changes made to this file will be lost! */ + +#ifndef W_JALV_CONFIG_H_WAF +#define W_JALV_CONFIG_H_WAF + +#define HAVE_LV2 1 +#define HAVE_LILV 1 +#define HAVE_SERD 1 +#define HAVE_SORD 1 +#define HAVE_SUIL 1 +#define HAVE_SRATOM 1 +#define HAVE_JACK 1 +#define HAVE_JACK_PORT_TYPE_GET_BUFFER_SIZE 1 +#define HAVE_JACK_METADATA 1 +#define JALV_JACK_SESSION 1 +#define JALV_VERSION "1.4.6" + +#endif /* W_JALV_CONFIG_H_WAF */ diff --git a/build/jalv_qt4_meta.hpp b/build/jalv_qt4_meta.hpp new file mode 100644 index 0000000..2f33319 --- /dev/null +++ b/build/jalv_qt4_meta.hpp @@ -0,0 +1,108 @@ +/**************************************************************************** +** Meta object code from reading C++ file 'jalv_qt4.cpp' +** +** Created by: The Qt Meta Object Compiler version 67 (Qt 5.3.1) +** +** WARNING! All changes made in this file will be lost! +*****************************************************************************/ + +#include <QtCore/qbytearray.h> +#include <QtCore/qmetatype.h> +#if !defined(Q_MOC_OUTPUT_REVISION) +#error "The header file 'jalv_qt4.cpp' doesn't include <QObject>." +#elif Q_MOC_OUTPUT_REVISION != 67 +#error "This file was generated using the moc from 5.3.1. It" +#error "cannot be used with the include files from this version of Qt." 
+#error "(The moc has changed too much.)" +#endif + +QT_BEGIN_MOC_NAMESPACE +struct qt_meta_stringdata_PresetAction_t { + QByteArrayData data[3]; + char stringdata[27]; +}; +#define QT_MOC_LITERAL(idx, ofs, len) \ + Q_STATIC_BYTE_ARRAY_DATA_HEADER_INITIALIZER_WITH_OFFSET(len, \ + qptrdiff(offsetof(qt_meta_stringdata_PresetAction_t, stringdata) + ofs \ + - idx * sizeof(QByteArrayData)) \ + ) +static const qt_meta_stringdata_PresetAction_t qt_meta_stringdata_PresetAction = { + { +QT_MOC_LITERAL(0, 0, 12), +QT_MOC_LITERAL(1, 13, 12), +QT_MOC_LITERAL(2, 26, 0) + }, + "PresetAction\0presetChosen\0" +}; +#undef QT_MOC_LITERAL + +static const uint qt_meta_data_PresetAction[] = { + + // content: + 7, // revision + 0, // classname + 0, 0, // classinfo + 1, 14, // methods + 0, 0, // properties + 0, 0, // enums/sets + 0, 0, // constructors + 0, // flags + 0, // signalCount + + // slots: name, argc, parameters, tag, flags + 1, 0, 19, 2, 0x0a /* Public */, + + // slots: parameters + QMetaType::Void, + + 0 // eod +}; + +void PresetAction::qt_static_metacall(QObject *_o, QMetaObject::Call _c, int _id, void **_a) +{ + if (_c == QMetaObject::InvokeMetaMethod) { + PresetAction *_t = static_cast<PresetAction *>(_o); + switch (_id) { + case 0: _t->presetChosen(); break; + default: ; + } + } + Q_UNUSED(_a); +} + +const QMetaObject PresetAction::staticMetaObject = { + { &QAction::staticMetaObject, qt_meta_stringdata_PresetAction.data, + qt_meta_data_PresetAction, qt_static_metacall, 0, 0} +}; + + +const QMetaObject *PresetAction::metaObject() const +{ + return QObject::d_ptr->metaObject ? QObject::d_ptr->dynamicMetaObject() : &staticMetaObject; +} + +void *PresetAction::qt_metacast(const char *_clname) +{ + if (!_clname) return 0; + if (!strcmp(_clname, qt_meta_stringdata_PresetAction.stringdata)) + return static_cast<void*>(const_cast< PresetAction*>(this)); + return QAction::qt_metacast(_clname); +} + +int PresetAction::qt_metacall(QMetaObject::Call _c, int _id, void **_a) +{ + _id = QAction::qt_metacall(_c, _id, _a); + if (_id < 0) + return _id; + if (_c == QMetaObject::InvokeMetaMethod) { + if (_id < 1) + qt_static_metacall(this, _c, _id, _a); + _id -= 1; + } else if (_c == QMetaObject::RegisterMethodArgumentMetaType) { + if (_id < 1) + *reinterpret_cast<int*>(_a[0]) = -1; + _id -= 1; + } + return _id; +} +QT_END_MOC_NAMESPACE diff --git a/build/src/LV2-render.c.1.o b/build/src/LV2-render.c.1.o Binary files differnew file mode 100644 index 0000000..b7674d1 --- /dev/null +++ b/build/src/LV2-render.c.1.o diff --git a/build/src/LV2-render_console.c.1.o b/build/src/LV2-render_console.c.1.o Binary files differnew file mode 100644 index 0000000..de5f1a7 --- /dev/null +++ b/build/src/LV2-render_console.c.1.o diff --git a/build/src/jalv.c.1.o b/build/src/jalv.c.1.o Binary files differnew file mode 100644 index 0000000..a7f67bf --- /dev/null +++ b/build/src/jalv.c.1.o diff --git a/build/src/jalv.c.2.o b/build/src/jalv.c.2.o Binary files differnew file mode 100644 index 0000000..0fb8a75 --- /dev/null +++ b/build/src/jalv.c.2.o diff --git a/build/src/jalv.c.3.o b/build/src/jalv.c.3.o Binary files differnew file mode 100644 index 0000000..0fb8a75 --- /dev/null +++ b/build/src/jalv.c.3.o diff --git a/build/src/jalv.c.4.o b/build/src/jalv.c.4.o Binary files differnew file mode 100644 index 0000000..0fb8a75 --- /dev/null +++ b/build/src/jalv.c.4.o diff --git a/build/src/jalv.c.5.o b/build/src/jalv.c.5.o Binary files differnew file mode 100644 index 0000000..0c43428 --- /dev/null +++ b/build/src/jalv.c.5.o diff --git 
a/build/src/jalv.c.6.o b/build/src/jalv.c.6.o Binary files differnew file mode 100644 index 0000000..0fb8a75 --- /dev/null +++ b/build/src/jalv.c.6.o diff --git a/build/src/jalv_console.c.1.o b/build/src/jalv_console.c.1.o Binary files differnew file mode 100644 index 0000000..1cc8454 --- /dev/null +++ b/build/src/jalv_console.c.1.o diff --git a/build/src/log.c.1.o b/build/src/log.c.1.o Binary files differnew file mode 100644 index 0000000..a22e698 --- /dev/null +++ b/build/src/log.c.1.o diff --git a/build/src/log.c.2.o b/build/src/log.c.2.o Binary files differnew file mode 100644 index 0000000..a22e698 --- /dev/null +++ b/build/src/log.c.2.o diff --git a/build/src/log.c.3.o b/build/src/log.c.3.o Binary files differnew file mode 100644 index 0000000..a22e698 --- /dev/null +++ b/build/src/log.c.3.o diff --git a/build/src/log.c.4.o b/build/src/log.c.4.o Binary files differnew file mode 100644 index 0000000..a22e698 --- /dev/null +++ b/build/src/log.c.4.o diff --git a/build/src/log.c.5.o b/build/src/log.c.5.o Binary files differnew file mode 100644 index 0000000..323d52b --- /dev/null +++ b/build/src/log.c.5.o diff --git a/build/src/log.c.6.o b/build/src/log.c.6.o Binary files differnew file mode 100644 index 0000000..a22e698 --- /dev/null +++ b/build/src/log.c.6.o diff --git a/build/src/lv2_evbuf.c.1.o b/build/src/lv2_evbuf.c.1.o Binary files differnew file mode 100644 index 0000000..407d490 --- /dev/null +++ b/build/src/lv2_evbuf.c.1.o diff --git a/build/src/lv2_evbuf.c.2.o b/build/src/lv2_evbuf.c.2.o Binary files differnew file mode 100644 index 0000000..407d490 --- /dev/null +++ b/build/src/lv2_evbuf.c.2.o diff --git a/build/src/lv2_evbuf.c.3.o b/build/src/lv2_evbuf.c.3.o Binary files differnew file mode 100644 index 0000000..407d490 --- /dev/null +++ b/build/src/lv2_evbuf.c.3.o diff --git a/build/src/lv2_evbuf.c.4.o b/build/src/lv2_evbuf.c.4.o Binary files differnew file mode 100644 index 0000000..407d490 --- /dev/null +++ b/build/src/lv2_evbuf.c.4.o diff --git a/build/src/lv2_evbuf.c.5.o b/build/src/lv2_evbuf.c.5.o Binary files differnew file mode 100644 index 0000000..a391e56 --- /dev/null +++ b/build/src/lv2_evbuf.c.5.o diff --git a/build/src/lv2_evbuf.c.6.o b/build/src/lv2_evbuf.c.6.o Binary files differnew file mode 100644 index 0000000..407d490 --- /dev/null +++ b/build/src/lv2_evbuf.c.6.o diff --git a/build/src/midi/fluid_list.c.1.o b/build/src/midi/fluid_list.c.1.o Binary files differnew file mode 100644 index 0000000..654fc88 --- /dev/null +++ b/build/src/midi/fluid_list.c.1.o diff --git a/build/src/midi/fluid_list.c.2.o b/build/src/midi/fluid_list.c.2.o Binary files differnew file mode 100644 index 0000000..654fc88 --- /dev/null +++ b/build/src/midi/fluid_list.c.2.o diff --git a/build/src/midi/fluid_list.c.3.o b/build/src/midi/fluid_list.c.3.o Binary files differnew file mode 100644 index 0000000..654fc88 --- /dev/null +++ b/build/src/midi/fluid_list.c.3.o diff --git a/build/src/midi/fluid_list.c.4.o b/build/src/midi/fluid_list.c.4.o Binary files differnew file mode 100644 index 0000000..654fc88 --- /dev/null +++ b/build/src/midi/fluid_list.c.4.o diff --git a/build/src/midi/fluid_list.c.5.o b/build/src/midi/fluid_list.c.5.o Binary files differnew file mode 100644 index 0000000..6e6fe92 --- /dev/null +++ b/build/src/midi/fluid_list.c.5.o diff --git a/build/src/midi/fluid_list.c.6.o b/build/src/midi/fluid_list.c.6.o Binary files differnew file mode 100644 index 0000000..654fc88 --- /dev/null +++ b/build/src/midi/fluid_list.c.6.o diff --git a/build/src/midi/fluid_midi.c.1.o 
b/build/src/midi/fluid_midi.c.1.o Binary files differnew file mode 100644 index 0000000..e42f80d --- /dev/null +++ b/build/src/midi/fluid_midi.c.1.o diff --git a/build/src/midi/fluid_midi.c.2.o b/build/src/midi/fluid_midi.c.2.o Binary files differnew file mode 100644 index 0000000..e42f80d --- /dev/null +++ b/build/src/midi/fluid_midi.c.2.o diff --git a/build/src/midi/fluid_midi.c.3.o b/build/src/midi/fluid_midi.c.3.o Binary files differnew file mode 100644 index 0000000..e42f80d --- /dev/null +++ b/build/src/midi/fluid_midi.c.3.o diff --git a/build/src/midi/fluid_midi.c.4.o b/build/src/midi/fluid_midi.c.4.o Binary files differnew file mode 100644 index 0000000..e42f80d --- /dev/null +++ b/build/src/midi/fluid_midi.c.4.o diff --git a/build/src/midi/fluid_midi.c.5.o b/build/src/midi/fluid_midi.c.5.o Binary files differnew file mode 100644 index 0000000..b7170cc --- /dev/null +++ b/build/src/midi/fluid_midi.c.5.o diff --git a/build/src/midi/fluid_midi.c.6.o b/build/src/midi/fluid_midi.c.6.o Binary files differnew file mode 100644 index 0000000..e42f80d --- /dev/null +++ b/build/src/midi/fluid_midi.c.6.o diff --git a/build/src/midi/midi_loader.c.1.o b/build/src/midi/midi_loader.c.1.o Binary files differnew file mode 100644 index 0000000..df2f558 --- /dev/null +++ b/build/src/midi/midi_loader.c.1.o diff --git a/build/src/midi/midi_loader.c.2.o b/build/src/midi/midi_loader.c.2.o Binary files differnew file mode 100644 index 0000000..df2f558 --- /dev/null +++ b/build/src/midi/midi_loader.c.2.o diff --git a/build/src/midi/midi_loader.c.3.o b/build/src/midi/midi_loader.c.3.o Binary files differnew file mode 100644 index 0000000..df2f558 --- /dev/null +++ b/build/src/midi/midi_loader.c.3.o diff --git a/build/src/midi/midi_loader.c.4.o b/build/src/midi/midi_loader.c.4.o Binary files differnew file mode 100644 index 0000000..df2f558 --- /dev/null +++ b/build/src/midi/midi_loader.c.4.o diff --git a/build/src/midi/midi_loader.c.5.o b/build/src/midi/midi_loader.c.5.o Binary files differnew file mode 100644 index 0000000..80c1b34 --- /dev/null +++ b/build/src/midi/midi_loader.c.5.o diff --git a/build/src/midi/midi_loader.c.6.o b/build/src/midi/midi_loader.c.6.o Binary files differnew file mode 100644 index 0000000..df2f558 --- /dev/null +++ b/build/src/midi/midi_loader.c.6.o diff --git a/build/src/state.c.1.o b/build/src/state.c.1.o Binary files differnew file mode 100644 index 0000000..ca3533a --- /dev/null +++ b/build/src/state.c.1.o diff --git a/build/src/state.c.2.o b/build/src/state.c.2.o Binary files differnew file mode 100644 index 0000000..16a6804 --- /dev/null +++ b/build/src/state.c.2.o diff --git a/build/src/state.c.3.o b/build/src/state.c.3.o Binary files differnew file mode 100644 index 0000000..16a6804 --- /dev/null +++ b/build/src/state.c.3.o diff --git a/build/src/state.c.4.o b/build/src/state.c.4.o Binary files differnew file mode 100644 index 0000000..16a6804 --- /dev/null +++ b/build/src/state.c.4.o diff --git a/build/src/state.c.5.o b/build/src/state.c.5.o Binary files differnew file mode 100644 index 0000000..1658601 --- /dev/null +++ b/build/src/state.c.5.o diff --git a/build/src/state.c.6.o b/build/src/state.c.6.o Binary files differnew file mode 100644 index 0000000..16a6804 --- /dev/null +++ b/build/src/state.c.6.o diff --git a/build/src/symap.c.1.o b/build/src/symap.c.1.o Binary files differnew file mode 100644 index 0000000..8c9c0f0 --- /dev/null +++ b/build/src/symap.c.1.o diff --git a/build/src/symap.c.2.o b/build/src/symap.c.2.o Binary files differnew file mode 100644 index 
0000000..8c9c0f0 --- /dev/null +++ b/build/src/symap.c.2.o diff --git a/build/src/symap.c.3.o b/build/src/symap.c.3.o Binary files differnew file mode 100644 index 0000000..8c9c0f0 --- /dev/null +++ b/build/src/symap.c.3.o diff --git a/build/src/symap.c.4.o b/build/src/symap.c.4.o Binary files differnew file mode 100644 index 0000000..8c9c0f0 --- /dev/null +++ b/build/src/symap.c.4.o diff --git a/build/src/symap.c.5.o b/build/src/symap.c.5.o Binary files differnew file mode 100644 index 0000000..03e1f68 --- /dev/null +++ b/build/src/symap.c.5.o diff --git a/build/src/symap.c.6.o b/build/src/symap.c.6.o Binary files differnew file mode 100644 index 0000000..8c9c0f0 --- /dev/null +++ b/build/src/symap.c.6.o diff --git a/build/src/worker.c.1.o b/build/src/worker.c.1.o Binary files differnew file mode 100644 index 0000000..559b401 --- /dev/null +++ b/build/src/worker.c.1.o diff --git a/build/src/worker.c.2.o b/build/src/worker.c.2.o Binary files differnew file mode 100644 index 0000000..dc7e745 --- /dev/null +++ b/build/src/worker.c.2.o diff --git a/build/src/worker.c.3.o b/build/src/worker.c.3.o Binary files differnew file mode 100644 index 0000000..dc7e745 --- /dev/null +++ b/build/src/worker.c.3.o diff --git a/build/src/worker.c.4.o b/build/src/worker.c.4.o Binary files differnew file mode 100644 index 0000000..dc7e745 --- /dev/null +++ b/build/src/worker.c.4.o diff --git a/build/src/worker.c.5.o b/build/src/worker.c.5.o Binary files differnew file mode 100644 index 0000000..fec4cc8 --- /dev/null +++ b/build/src/worker.c.5.o diff --git a/build/src/worker.c.6.o b/build/src/worker.c.6.o Binary files differnew file mode 100644 index 0000000..dc7e745 --- /dev/null +++ b/build/src/worker.c.6.o diff --git a/doc/jalv.1 b/doc/jalv.1 new file mode 100644 index 0000000..a196c01 --- /dev/null +++ b/doc/jalv.1 @@ -0,0 +1,50 @@ +.TH JALV 1 "19 Apr 2012" + +.SH NAME +.B jalv \- Run an LV2 plugin as a JACK application (console version). + +.SH SYNOPSIS +.B jalv [OPTION]... PLUGIN_URI + +.SH OPTIONS + +.TP +\fB\-h\fR, \fB\-\-help\fR +Print the command line options. + +.TP +\fB\-c\fR +Print control output changes to stdout. + +.TP +\fB\-c SYM=VAL\fR +Set control value (e.g. "vol=1.4"). + +.TP +\fB\-u UUID\fR, \fB\-\-uuid UUID\fR +UUID for Jack session restoration. + +.TP +\fB\-l DIR\fR, \fB\-\-load DIR\fR +Load state from state directory. + +.TP +\fB\-d\fR, \fB\-\-dump\fR +Dump plugin <=> UI communication. + +.TP +\fB\-b SIZE\fR +Buffer size for plugin <=> UI communication. 
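For illustration, a plugin is run by passing its URI as the only positional
argument, optionally preceded by control settings, e.g.

    jalv -c vol=1.4 http://lv2plug.in/plugins/eg-amp

where "vol" must be replaced by an actual control port symbol of the chosen
plugin; the eg-amp URI is the stock LV2 example amplifier, and any URI
reported by lv2ls(1) can be used instead.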
+ +.SH SEE ALSO +.BR jalv.gtk(1), +.BR jalv.gtkmm(1), +.BR jalv.qt(2), +.BR lv2ls(1), +.BR jackd(1) + +.SH AUTHOR +jalv was written by David Robillard <d@drobilla.net> +.PP +This manual page was written by JaromÃr Mikes <mira.mikes@seznam.cz> +and David Robillard <d@drobilla.net> diff --git a/short_example.mid b/short_example.mid Binary files differnew file mode 100644 index 0000000..a790260 --- /dev/null +++ b/short_example.mid diff --git a/src/.jalv_internal.h.swp b/src/.jalv_internal.h.swp Binary files differnew file mode 100644 index 0000000..ec50f6b --- /dev/null +++ b/src/.jalv_internal.h.swp diff --git a/src/LV2-render.c b/src/LV2-render.c new file mode 100644 index 0000000..d1be763 --- /dev/null +++ b/src/LV2-render.c @@ -0,0 +1,823 @@ +#define _POSIX_C_SOURCE 200809L /* for mkdtemp */ +#define _DARWIN_C_SOURCE /* for mkdtemp on OSX */ +#define _DEFAULT_SOURCE +#include <assert.h> +#include <math.h> +#include <signal.h> +#include <stdio.h> +#include <stdlib.h> +#include <string.h> + +#ifndef __cplusplus +# include <stdbool.h> +#endif + +#ifdef _WIN32 +# include <io.h> /* for _mktemp */ +# define snprintf _snprintf +#else +# include <sys/stat.h> +# include <sys/types.h> +# include <unistd.h> +#endif + +#include "jalv_config.h" +#include "LV2-render_internal.h" + + +#include "lv2/lv2plug.in/ns/ext/atom/atom.h" +#include "lv2/lv2plug.in/ns/ext/buf-size/buf-size.h" +#include "lv2/lv2plug.in/ns/ext/data-access/data-access.h" +#include "lv2/lv2plug.in/ns/ext/event/event.h" +#include "lv2/lv2plug.in/ns/ext/options/options.h" +#include "lv2/lv2plug.in/ns/ext/parameters/parameters.h" +#include "lv2/lv2plug.in/ns/ext/patch/patch.h" +#include "lv2/lv2plug.in/ns/ext/port-groups/port-groups.h" +#include "lv2/lv2plug.in/ns/ext/presets/presets.h" +#include "lv2/lv2plug.in/ns/ext/state/state.h" +#include "lv2/lv2plug.in/ns/ext/time/time.h" +#include "lv2/lv2plug.in/ns/ext/uri-map/uri-map.h" +#include "lv2/lv2plug.in/ns/ext/urid/urid.h" +#include "lv2/lv2plug.in/ns/ext/worker/worker.h" +#include "lv2/lv2plug.in/ns/extensions/ui/ui.h" + +#include "lilv/lilv.h" + +#include "lv2_evbuf.h" +#include "worker.h" + +#define NS_RDF "http://www.w3.org/1999/02/22-rdf-syntax-ns#" + +#define USTR(str) ((const uint8_t*)str) + +#ifndef MIN +# define MIN(a, b) (((a) < (b)) ? (a) : (b)) +#endif + +#ifndef MAX +# define MAX(a, b) (((a) > (b)) ? (a) : (b)) +#endif + +#ifdef __clang__ +# define REALTIME __attribute__((annotate("realtime"))) +#else +# define REALTIME +#endif + + +/* Size factor for UI ring buffers. The ring size is a few times the size of + an event output to give the UI a chance to keep up. Experiments with Ingen, + which can highly saturate its event output, led me to this value. It + really ought to be enough for anybody(TM). +*/ +#define N_BUFFER_CYCLES 16 + +#include <alsa/asoundlib.h> +#include <sndfile.h> +#include "midi/midi_loader.h" +#include "midi/fluidsynth_priv.h" +#define SAMPLE_RATE 48000 // here + +//so min is here, how come it doesn't see it? not sure, try remove inline +int min(int x, int y) { + return (x < y) ? 
x : y; +} + +typedef struct process_midi_ctx_t { + Jalv *jalv; + SNDFILE *outfile; + float sample_rate; +} process_midi_ctx_t; + + +void print_audio_to_terminal(float *sf_output, size_t nframes){ + size_t i; + for(i = 0; i< nframes; i++){ + printf("%04x ", * ((unsigned int *)sf_output) ); + sf_output++; + } +} + +int write_audio_to_file(SNDFILE *outfile, float *sf_output, size_t nframes){ + size_t items_written = 0; + /* Write the audio */ + if ((items_written = sf_writef_float(outfile, + sf_output, + nframes)) != nframes) { + fprintf(stderr, "Error: can't write data to output file\n"); + fprintf(stderr, "%s: %s\n", "jalv", sf_strerror(outfile)); + return 1; + } + return 0; +} + +SNDFILE *open_wav_file(char *output_file, float sample_rate, int nchannels, size_t length){ + /* prepare file */ + SF_INFO outsfinfo; + SNDFILE *outfile; + + outsfinfo.samplerate = sample_rate; + outsfinfo.channels = nchannels; + outsfinfo.format = SF_FORMAT_WAV | SF_FORMAT_PCM_16; + + outsfinfo.frames = length; + + outfile = sf_open(output_file, SFM_WRITE, &outsfinfo); + return outfile; +} + + +int process_midi_cb(fluid_midi_event_t *event, size_t msecs, process_midi_ctx_t *ctx) +{ + Jalv *jalv = ctx->jalv; + float **pluginOutputBuffers = (float **)calloc(jalv->num_ports, sizeof(float *)); + float *pluginOutputBuffer_first = NULL; + size_t nframes; + + /* convert msecs */ + nframes = msecs * ctx->sample_rate / 1000; + + /* Prepare port buffers */ + for (uint32_t p = 0; p < jalv->num_ports; ++p) { + struct Port* port = &jalv->ports[p]; + if (port->type == TYPE_AUDIO) { + pluginOutputBuffers[p] = (float *)calloc(nframes, sizeof(float)); + if(!pluginOutputBuffer_first){ pluginOutputBuffer_first = pluginOutputBuffers[p]; }; + + lilv_instance_connect_port( + jalv->instance, p, //connect port p to this location + pluginOutputBuffers[p] + ); + printf("buffer %x ptr: %8x\n", p, pluginOutputBuffers[p]); + } else if (port->type == TYPE_EVENT && port->flow == FLOW_INPUT) { + lv2_evbuf_reset(port->evbuf, true); + + LV2_Evbuf_Iterator iter = lv2_evbuf_begin(port->evbuf); + + uint8_t midi_event_buffer[3]; + midi_event_buffer[0] = event->type; + midi_event_buffer[1] = event->param1; + midi_event_buffer[2] = event->param2; +//atom_event: type: 19 frames: 1013 size: 3 + lv2_evbuf_write(&iter, + 0, 0, //Doesn't care about these, not sure why + jalv->midi_event_id, + sizeof(midi_event_buffer), midi_event_buffer); + } else if (port->type == TYPE_EVENT) { + /* Clear event output for plugin to write to */ +// printf("CLEARING EVENT\n"); + lv2_evbuf_reset(port->evbuf, false); + } + } + + + + lilv_instance_run(jalv->instance, nframes); + + write_audio_to_file(ctx->outfile, pluginOutputBuffers[3], nframes); //TODO ADD LATER +// printf("total ports: %d\n", jalv->num_ports); + for(int i=0; i<jalv->num_ports; i++){ + if(pluginOutputBuffers[i]){ + //printf("buffer: %x content:", i); + //print_audio_to_terminal(pluginOutputBuffers[i], nframes); + free(pluginOutputBuffers[i]); + } + } + + + + + return 0; +} + + +ZixSem exit_sem; /**< Exit semaphore */ + +static LV2_URID +map_uri(LV2_URID_Map_Handle handle, + const char* uri) +{ + Jalv* jalv = (Jalv*)handle; + zix_sem_wait(&jalv->symap_lock); + const LV2_URID id = symap_map(jalv->symap, uri); + zix_sem_post(&jalv->symap_lock); + return id; +} + +static const char* +unmap_uri(LV2_URID_Unmap_Handle handle, + LV2_URID urid) +{ + Jalv* jalv = (Jalv*)handle; + zix_sem_wait(&jalv->symap_lock); + const char* uri = symap_unmap(jalv->symap, urid); + zix_sem_post(&jalv->symap_lock); + return uri; +} + 
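/*
 * map_uri() and unmap_uri() above (and uri_to_id() below, the legacy
 * uri-map variant) form the host side of the LV2 URID feature: URI strings
 * are interned in jalv->symap and exchanged for small integer IDs that are
 * cheap to compare in real-time code.  Each call is bracketed by
 * jalv->symap_lock so the table stays consistent if the callbacks are
 * reached from more than one context.  A plugin-side sketch of consuming
 * the feature (illustrative only):
 *
 *     LV2_URID midi_ev = map->map(map->handle, LV2_MIDI__MidiEvent);
 */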
+/** + Map function for URI map extension. +*/ +static uint32_t +uri_to_id(LV2_URI_Map_Callback_Data callback_data, + const char* map, + const char* uri) +{ + Jalv* jalv = (Jalv*)callback_data; + zix_sem_wait(&jalv->symap_lock); + const LV2_URID id = symap_map(jalv->symap, uri); + zix_sem_post(&jalv->symap_lock); + return id; +} + + + +//{{{ LV2 host features defined +#define NS_EXT "http://lv2plug.in/ns/ext/" +static LV2_URI_Map_Feature uri_map = { NULL, &uri_to_id }; + +static LV2_Extension_Data_Feature ext_data = { NULL }; + + +static LV2_Feature uri_map_feature = { NS_EXT "uri-map", &uri_map }; +static LV2_Feature map_feature = { LV2_URID__map, NULL }; +static LV2_Feature unmap_feature = { LV2_URID__unmap, NULL }; +static LV2_Feature make_path_feature = { LV2_STATE__makePath, NULL }; +static LV2_Feature schedule_feature = { LV2_WORKER__schedule, NULL }; +static LV2_Feature log_feature = { LV2_LOG__log, NULL }; +static LV2_Feature options_feature = { LV2_OPTIONS__options, NULL }; +static LV2_Feature def_state_feature = { LV2_STATE__loadDefaultState, NULL }; + + +/** These features have no data */ +static LV2_Feature buf_size_features[3] = { + { LV2_BUF_SIZE__powerOf2BlockLength, NULL }, + { LV2_BUF_SIZE__fixedBlockLength, NULL }, + { LV2_BUF_SIZE__boundedBlockLength, NULL } }; + +const LV2_Feature* features[13] = { + &uri_map_feature, &map_feature, &unmap_feature, + &make_path_feature, + &schedule_feature, + &log_feature, + &options_feature, + &def_state_feature, + &buf_size_features[0], + &buf_size_features[1], + &buf_size_features[2], + NULL +}; + +/** Return true iff Jalv supports the given feature. */ +static bool +feature_is_supported(const char* uri) +{ + if (!strcmp(uri, "http://lv2plug.in/ns/lv2core#isLive")) { + return true; + } + for (const LV2_Feature*const* f = features; *f; ++f) { + if (!strcmp(uri, (*f)->URI)) { + return true; + } + } + return false; +} + +//}}} + +/** Abort and exit on error */ +static void +die(const char* msg) +{ + fprintf(stderr, "%s\n", msg); + exit(EXIT_FAILURE); +} + +static void +create_port(Jalv* jalv, + uint32_t port_index, + float default_value) +{ + struct Port* const port = &jalv->ports[port_index]; + + port->lilv_port = lilv_plugin_get_port_by_index(jalv->plugin, port_index); + port->evbuf = NULL; + port->buf_size = 0; + port->index = port_index; + port->control = 0.0f; + port->flow = FLOW_UNKNOWN; + + const bool optional = lilv_port_has_property( + jalv->plugin, port->lilv_port, jalv->nodes.lv2_connectionOptional); + + /* Set the port flow (input or output) */ + if (lilv_port_is_a(jalv->plugin, port->lilv_port, jalv->nodes.lv2_InputPort)) { + port->flow = FLOW_INPUT; + } else if (lilv_port_is_a(jalv->plugin, port->lilv_port, + jalv->nodes.lv2_OutputPort)) { + port->flow = FLOW_OUTPUT; + } else if (!optional) { + die("Mandatory port has unknown type (neither input nor output)"); + } + + /* Set control values */ + if (lilv_port_is_a(jalv->plugin, port->lilv_port, jalv->nodes.lv2_ControlPort)) { + port->type = TYPE_CONTROL; + port->control = isnan(default_value) ? 
0.0f : default_value; + } else if (lilv_port_is_a(jalv->plugin, port->lilv_port, + jalv->nodes.lv2_AudioPort)) { + port->type = TYPE_AUDIO; + } else if (lilv_port_is_a(jalv->plugin, port->lilv_port, + jalv->nodes.ev_EventPort)) { + port->type = TYPE_EVENT; + port->old_api = true; + } else if (lilv_port_is_a(jalv->plugin, port->lilv_port, + jalv->nodes.atom_AtomPort)) { + port->type = TYPE_EVENT; + port->old_api = false; + } else if (!optional) { + die("Mandatory port has unknown data type"); + } + + LilvNode* min_size = lilv_port_get( + jalv->plugin, port->lilv_port, jalv->nodes.rsz_minimumSize); + if (min_size && lilv_node_is_int(min_size)) { + port->buf_size = lilv_node_as_int(min_size); + jalv->opts.buffer_size = MAX( + jalv->opts.buffer_size, port->buf_size * N_BUFFER_CYCLES); + } + lilv_node_free(min_size); + + /* Update longest symbol for aligned console printing */ + const LilvNode* sym = lilv_port_get_symbol(jalv->plugin, port->lilv_port); + const size_t len = strlen(lilv_node_as_string(sym)); + if (len > jalv->longest_sym) { + jalv->longest_sym = len; + } +} + +/** + Create port structures from data (via create_port()) for all ports. +*/ +void +jalv_create_ports(Jalv* jalv) +{ + jalv->num_ports = lilv_plugin_get_num_ports(jalv->plugin); + jalv->ports = (struct Port*)calloc(jalv->num_ports, sizeof(struct Port)); + float* default_values = (float*)calloc( + lilv_plugin_get_num_ports(jalv->plugin), sizeof(float)); + lilv_plugin_get_port_ranges_float(jalv->plugin, NULL, NULL, default_values); + + for (uint32_t i = 0; i < jalv->num_ports; ++i) { + create_port(jalv, i, default_values[i]); + } + + const LilvPort* control_input = lilv_plugin_get_port_by_designation( + jalv->plugin, jalv->nodes.lv2_InputPort, jalv->nodes.lv2_control); + if (control_input) { + jalv->control_in = lilv_port_get_index(jalv->plugin, control_input); + } + + free(default_values); +} + +/** + Allocate port buffers (only necessary for MIDI). +*/ +static void +jalv_allocate_port_buffers(Jalv* jalv) +{ + for (uint32_t i = 0; i < jalv->num_ports; ++i) { + struct Port* const port = &jalv->ports[i]; + switch (port->type) { + case TYPE_EVENT: + lv2_evbuf_free(port->evbuf); + const size_t buf_size = (port->buf_size > 0) + ? port->buf_size + : jalv->midi_buf_size; + port->evbuf = lv2_evbuf_new( + buf_size, + port->old_api ? LV2_EVBUF_EVENT : LV2_EVBUF_ATOM, + jalv->map.map(jalv->map.handle, + lilv_node_as_string(jalv->nodes.atom_Chunk)), + jalv->map.map(jalv->map.handle, + lilv_node_as_string(jalv->nodes.atom_Sequence))); + //FIXME instance setup here + lilv_instance_connect_port( + jalv->instance, i, lv2_evbuf_get_buffer(port->evbuf)); + default: break; + } + } +} + +/** + Get a port structure by symbol. + + TODO: Build an index to make this faster, currently O(n) which may be + a problem when restoring the state of plugins with many ports. 
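   One possible approach (a sketch, not implemented here): fill a hash table
   or sorted array keyed by the port symbol once in jalv_create_ports(), then
   look the symbol up there instead of scanning every port, bringing the cost
   down to O(1) or O(log n).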
+*/ +struct Port* +jalv_port_by_symbol(Jalv* jalv, const char* sym) +{ + for (uint32_t i = 0; i < jalv->num_ports; ++i) { + struct Port* const port = &jalv->ports[i]; + const LilvNode* port_sym = lilv_port_get_symbol(jalv->plugin, + port->lilv_port); + + if (!strcmp(lilv_node_as_string(port_sym), sym)) { + return port; + } + } + + return NULL; +} + +static void +print_control_value(Jalv* jalv, const struct Port* port, float value) +{ + const LilvNode* sym = lilv_port_get_symbol(jalv->plugin, port->lilv_port); + printf("%-*s = %f\n", jalv->longest_sym, lilv_node_as_string(sym), value); +} +//}}} + +static void +activate_port(Jalv* jalv, + uint32_t port_index) +{ + struct Port* const port = &jalv->ports[port_index]; + + const LilvNode* sym = lilv_port_get_symbol(jalv->plugin, port->lilv_port); + + /* Connect unsupported ports to NULL (known to be optional by this point) */ + if (port->flow == FLOW_UNKNOWN || port->type == TYPE_UNKNOWN) { + lilv_instance_connect_port(jalv->instance, port_index, NULL); + return; + } + + /* Connect the port based on its type */ + switch (port->type) { + case TYPE_CONTROL: + print_control_value(jalv, port, port->control); + lilv_instance_connect_port(jalv->instance, port_index, &port->control); + break; + case TYPE_AUDIO: + //FIXME maybe connect the ports to the buffers here instead + break; + case TYPE_EVENT: + if (lilv_port_supports_event( + jalv->plugin, port->lilv_port, jalv->nodes.midi_MidiEvent)) { + } + break; + default: + break; + } + +} + +static bool +jalv_apply_control_arg(Jalv* jalv, const char* s) +{ + char sym[256]; + float val = 0.0f; + if (sscanf(s, "%[^=]=%f", sym, &val) != 2) { + fprintf(stderr, "warning: Ignoring invalid value `%s'\n", s); + return false; + } + + struct Port* port = jalv_port_by_symbol(jalv, sym); + if (!port) { + fprintf(stderr, "warning: Ignoring value for unknown port `%s'\n", sym); + return false; + } + + port->control = val; + return true; +} + +static void +signal_handler(int ignored) +{ + zix_sem_post(&exit_sem); +} + +int +main(int argc, char** argv) +{ + Jalv jalv; + memset(&jalv, '\0', sizeof(Jalv)); + jalv.prog_name = argv[0]; + jalv.block_length = 4096; + jalv.midi_buf_size = 1024; + jalv.play_state = JALV_PAUSED; + jalv.bpm = 120.0f; + + if (jalv_init(&argc, &argv, &jalv.opts)) { + return EXIT_FAILURE; + } + + if (jalv.opts.uuid) { + printf("UUID: %s\n", jalv.opts.uuid); + } + + jalv.symap = symap_new(); + zix_sem_init(&jalv.symap_lock, 1); + uri_map.callback_data = &jalv; + + jalv.map.handle = &jalv; + jalv.map.map = map_uri; + map_feature.data = &jalv.map; + + jalv.unmap.handle = &jalv; + jalv.unmap.unmap = unmap_uri; + unmap_feature.data = &jalv.unmap; + + lv2_atom_forge_init(&jalv.forge, &jalv.map); + + jalv.sratom = sratom_new(&jalv.map); + jalv.ui_sratom = sratom_new(&jalv.map); + + jalv.midi_event_id = uri_to_id( + &jalv, "http://lv2plug.in/ns/ext/event", LV2_MIDI__MidiEvent); + + jalv.urids.atom_Float = symap_map(jalv.symap, LV2_ATOM__Float); + jalv.urids.atom_Int = symap_map(jalv.symap, LV2_ATOM__Int); + jalv.urids.atom_eventTransfer = symap_map(jalv.symap, LV2_ATOM__eventTransfer); + jalv.urids.bufsz_maxBlockLength = symap_map(jalv.symap, LV2_BUF_SIZE__maxBlockLength); + jalv.urids.bufsz_minBlockLength = symap_map(jalv.symap, LV2_BUF_SIZE__minBlockLength); + jalv.urids.bufsz_sequenceSize = symap_map(jalv.symap, LV2_BUF_SIZE__sequenceSize); + jalv.urids.log_Trace = symap_map(jalv.symap, LV2_LOG__Trace); + jalv.urids.midi_MidiEvent = symap_map(jalv.symap, LV2_MIDI__MidiEvent); + jalv.urids.param_sampleRate 
= symap_map(jalv.symap, LV2_PARAMETERS__sampleRate); + jalv.urids.patch_Set = symap_map(jalv.symap, LV2_PATCH__Set); + jalv.urids.patch_property = symap_map(jalv.symap, LV2_PATCH__property); + jalv.urids.patch_value = symap_map(jalv.symap, LV2_PATCH__value); + jalv.urids.time_Position = symap_map(jalv.symap, LV2_TIME__Position); + jalv.urids.time_bar = symap_map(jalv.symap, LV2_TIME__bar); + jalv.urids.time_barBeat = symap_map(jalv.symap, LV2_TIME__barBeat); + jalv.urids.time_beatUnit = symap_map(jalv.symap, LV2_TIME__beatUnit); + jalv.urids.time_beatsPerBar = symap_map(jalv.symap, LV2_TIME__beatsPerBar); + jalv.urids.time_beatsPerMinute = symap_map(jalv.symap, LV2_TIME__beatsPerMinute); + jalv.urids.time_frame = symap_map(jalv.symap, LV2_TIME__frame); + jalv.urids.time_speed = symap_map(jalv.symap, LV2_TIME__speed); + jalv.urids.ui_updateRate = symap_map(jalv.symap, LV2_UI__updateRate); + + + +#ifdef _WIN32 + jalv.temp_dir = jalv_strdup("jalvXXXXXX"); + _mktemp(jalv.temp_dir); +#else + char* templ = jalv_strdup("/tmp/jalv-XXXXXX"); + jalv.temp_dir = jalv_strjoin(mkdtemp(templ), "/"); + free(templ); +#endif + + LV2_State_Make_Path make_path = { &jalv, jalv_make_path }; + make_path_feature.data = &make_path; + + LV2_Worker_Schedule schedule = { &jalv, jalv_worker_schedule }; + schedule_feature.data = &schedule; + + LV2_Log_Log llog = { &jalv, jalv_printf, jalv_vprintf }; + log_feature.data = &llog; + + zix_sem_init(&exit_sem, 0); + jalv.done = &exit_sem; + + zix_sem_init(&jalv.paused, 0); +// zix_sem_init(&jalv.worker.sem, 0); + + signal(SIGINT, signal_handler); + signal(SIGTERM, signal_handler); + + /* Find all installed plugins */ + LilvWorld* world = lilv_world_new(); + lilv_world_load_all(world); + jalv.world = world; + const LilvPlugins* plugins = lilv_world_get_all_plugins(world); + + /* Cache URIs for concepts we'll use */ + jalv.nodes.atom_AtomPort = lilv_new_uri(world, LV2_ATOM__AtomPort); + jalv.nodes.atom_Chunk = lilv_new_uri(world, LV2_ATOM__Chunk); + jalv.nodes.atom_Sequence = lilv_new_uri(world, LV2_ATOM__Sequence); + jalv.nodes.ev_EventPort = lilv_new_uri(world, LV2_EVENT__EventPort); + jalv.nodes.lv2_AudioPort = lilv_new_uri(world, LV2_CORE__AudioPort); + jalv.nodes.lv2_ControlPort = lilv_new_uri(world, LV2_CORE__ControlPort); + jalv.nodes.lv2_InputPort = lilv_new_uri(world, LV2_CORE__InputPort); + jalv.nodes.lv2_OutputPort = lilv_new_uri(world, LV2_CORE__OutputPort); + jalv.nodes.lv2_connectionOptional = lilv_new_uri(world, LV2_CORE__connectionOptional); + jalv.nodes.lv2_control = lilv_new_uri(world, LV2_CORE__control); + jalv.nodes.lv2_name = lilv_new_uri(world, LV2_CORE__name); + jalv.nodes.midi_MidiEvent = lilv_new_uri(world, LV2_MIDI__MidiEvent); + jalv.nodes.pg_group = lilv_new_uri(world, LV2_PORT_GROUPS__group); + jalv.nodes.pset_Preset = lilv_new_uri(world, LV2_PRESETS__Preset); + jalv.nodes.rdfs_label = lilv_new_uri(world, LILV_NS_RDFS "label"); + jalv.nodes.rsz_minimumSize = lilv_new_uri(world, LV2_RESIZE_PORT__minimumSize); + jalv.nodes.work_interface = lilv_new_uri(world, LV2_WORKER__interface); + jalv.nodes.work_schedule = lilv_new_uri(world, LV2_WORKER__schedule); + jalv.nodes.end = NULL; + + /* Get plugin URI from loaded state or command line */ + LilvState* state = NULL; + LilvNode* plugin_uri = NULL; + if (jalv.opts.load) { + struct stat info; + stat(jalv.opts.load, &info); + if (S_ISDIR(info.st_mode)) { + char* path = jalv_strjoin(jalv.opts.load, "/state.ttl"); + state = lilv_state_new_from_file(jalv.world, &jalv.map, NULL, path); + free(path); + } else { + 
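+ /* opts.load is a plain state file rather than a save directory, so load it directly */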
state = lilv_state_new_from_file(jalv.world, &jalv.map, NULL,
+ jalv.opts.load);
+ }
+ if (!state) {
+ fprintf(stderr, "Failed to load state from %s\n", jalv.opts.load);
+ return EXIT_FAILURE;
+ }
+ plugin_uri = lilv_node_duplicate(lilv_state_get_plugin_uri(state));
+ } else if (argc > 1) {
+ plugin_uri = lilv_new_uri(world, argv[argc - 1]);
+ }
+
+ if (!plugin_uri) {
+ fprintf(stderr, "Missing plugin URI, try lv2ls to list plugins\n");
+ return EXIT_FAILURE;
+ }
+
+ /* Find plugin */
+ printf("Plugin: %s\n", lilv_node_as_string(plugin_uri));
+ jalv.plugin = lilv_plugins_get_by_uri(plugins, plugin_uri);
+ lilv_node_free(plugin_uri);
+ if (!jalv.plugin) {
+ fprintf(stderr, "Failed to find plugin\n");
+ lilv_world_free(world);
+ return EXIT_FAILURE;
+ }
+
+ /* Check that any required features are supported */
+ LilvNodes* req_feats = lilv_plugin_get_required_features(jalv.plugin);
+ LILV_FOREACH(nodes, f, req_feats) {
+ const char* uri = lilv_node_as_uri(lilv_nodes_get(req_feats, f));
+ if (!feature_is_supported(uri)) {
+ fprintf(stderr, "Feature %s is not supported\n", uri);
+ lilv_world_free(world);
+ return EXIT_FAILURE;
+ }
+ }
+ lilv_nodes_free(req_feats);
+
+ if (!state) {
+ printf("Creating new default state for plugin\n");
+ /* Not restoring state, load the plugin as a preset to get default */
+ state = lilv_state_new_from_world(
+ jalv.world, &jalv.map, lilv_plugin_get_uri(jalv.plugin));
+ }
+
+ /* Create port structures (jalv.ports) */
+ jalv_create_ports(&jalv);
+
+// A LilvNode wraps one node of the plugin's RDF description (a URI, string, or
+// number); lilv query functions take and return these nodes.
+ /* Get the plugin's name */
+ LilvNode* name = lilv_plugin_get_name(jalv.plugin);
+ const char* name_str = lilv_node_as_string(name);
+
+ lilv_node_free(name); /* NOTE: name_str points into `name` and is not used after this */
+
+ jalv.sample_rate = SAMPLE_RATE;
+ jalv.block_length = 1024; //TODO used to be 256
+ jalv.midi_buf_size = 32768; //used to be 256
+
+ printf("Block length: %u frames\n", jalv.block_length);
+ printf("MIDI buffers: %zu bytes\n", jalv.midi_buf_size);
+
+ if (jalv.opts.buffer_size == 0) {
+ /* The UI ring is fed by plugin output ports (usually one), and the UI
+ updates roughly once per cycle. The ring size is a few times the
+ size of the MIDI output to give the UI a chance to keep up. The UI
+ should be able to keep up with 4 cycles, and tests show this works
+ for me, but this value might need increasing to avoid overflows.
+ */
+ jalv.opts.buffer_size = jalv.midi_buf_size * N_BUFFER_CYCLES;
+ }
+
+ /* The UI can only go so fast, clamp to reasonable limits
+    (ui_update_hz is never set in this offline host, so it stays 0) */
+ jalv.ui_update_hz = MIN(60, jalv.ui_update_hz);
+ jalv.opts.buffer_size = MAX(4096, jalv.opts.buffer_size);
+ fprintf(stderr, "Comm buffers: %u bytes\n", jalv.opts.buffer_size);
+ fprintf(stderr, "Update rate: %.01f Hz\n", jalv.ui_update_hz);
+
+ /* Build options array to pass to plugin */
+ const LV2_Options_Option options[] = {
+ { LV2_OPTIONS_INSTANCE, 0, jalv.urids.param_sampleRate,
+ sizeof(float), jalv.urids.atom_Float, &jalv.sample_rate },
+ { LV2_OPTIONS_INSTANCE, 0, jalv.urids.bufsz_minBlockLength,
+ sizeof(int32_t), jalv.urids.atom_Int, &jalv.block_length },
+ { LV2_OPTIONS_INSTANCE, 0, jalv.urids.bufsz_maxBlockLength,
+ sizeof(int32_t), jalv.urids.atom_Int, &jalv.block_length },
+ { LV2_OPTIONS_INSTANCE, 0, jalv.urids.bufsz_sequenceSize,
+ sizeof(int32_t), jalv.urids.atom_Int, &jalv.midi_buf_size },
+ { LV2_OPTIONS_INSTANCE, 0, jalv.urids.ui_updateRate,
+ sizeof(float), jalv.urids.atom_Float, &jalv.ui_update_hz },
+ { LV2_OPTIONS_INSTANCE, 0, 0, 0, 0, NULL }
+ };
+
+ options_feature.data = &options;
+
+ /* Instantiate the plugin */
+ jalv.instance = lilv_plugin_instantiate(
+ jalv.plugin, jalv.sample_rate, features);
+ if (!jalv.instance) {
+ die("Failed to instantiate plugin");
+ }
+
+ ext_data.data_access = lilv_instance_get_descriptor(jalv.instance)->extension_data;
+
+ fprintf(stderr, "\n");
+ if (!jalv.buf_size_set) {
+ jalv_allocate_port_buffers(&jalv);
+ }
+
+ /* Apply loaded state to plugin instance if necessary */
+ if (state) {
+ jalv_apply_state(&jalv, state);
+ printf("Applied loaded state\n");
+ }
+
+ if (jalv.opts.controls) {
+ for (char** c = jalv.opts.controls; *c; ++c) {
+ jalv_apply_control_arg(&jalv, *c);
+ }
+ }
+
+ for (uint32_t i = 0; i < jalv.num_ports; ++i) {
+ activate_port(&jalv, i);
+ }
+
+ /* Activate plugin */
+ lilv_instance_activate(jalv.instance);
+
+
+//FIXME: jalv.sample_rate is already set to SAMPLE_RATE above; the reassignment below is redundant
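+ /*
+  From here on everything runs offline: the transport is marked as running,
+  the output WAV is opened with open_wav_file(), and load_midi_file() drives
+  process_midi_cb() (defined elsewhere in this tree), which is expected to
+  run the plugin and write the rendered audio to `outfile` before it is
+  closed below.
+ */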
+ jalv.sample_rate = SAMPLE_RATE;
+ jalv.play_state = JALV_RUNNING;
+
+ // open_wav_file here
+ const char* output_file = "output.wav";
+ size_t length = SAMPLE_RATE;
+ float sample_rate = SAMPLE_RATE;
+ int nchannels = 1;
+ SNDFILE *outfile = open_wav_file(output_file, sample_rate, nchannels, length);
+ process_midi_ctx_t process_midi_ctx;
+ process_midi_ctx.jalv = &jalv;
+ process_midi_ctx.outfile = outfile;
+ process_midi_ctx.sample_rate = sample_rate;
+
+ load_midi_file("short_example.mid", (read_midi_callback)process_midi_cb, &process_midi_ctx);
+
+// TODO: study the MIDI-driven render path (process_midi_cb) in more detail
+
+
+ sf_close(outfile);
+
+ /* Run UI (or prompt at console) */
+// jalv_open_ui(&jalv);
+
+
+// zix_sem_wait(&exit_sem);
+// jalv.exit = true;
+
+ fprintf(stderr, "Exiting...\n");
+
+ /* Terminate the worker (nothing to do here; no worker thread is started) */
+
+ for (uint32_t i = 0; i < jalv.num_ports; ++i) {
+ if (jalv.ports[i].evbuf) {
+ lv2_evbuf_free(jalv.ports[i].evbuf);
+ }
+ }
+
+ /* Deactivate plugin */
+ lilv_instance_deactivate(jalv.instance);
+ lilv_instance_free(jalv.instance);
+
+ /* Clean up */
+ free(jalv.ports);
+ for (LilvNode** n = (LilvNode**)&jalv.nodes; *n; ++n) {
+ lilv_node_free(*n);
+ }
+ symap_free(jalv.symap);
+ zix_sem_destroy(&jalv.symap_lock);
+ sratom_free(jalv.sratom);
+ sratom_free(jalv.ui_sratom);
+ lilv_world_free(world);
+
+ zix_sem_destroy(&exit_sem);
+
+ remove(jalv.temp_dir);
+ free(jalv.temp_dir);
+ free(jalv.ui_event_buf);
+
+ return 0;
+}
+
diff --git a/src/LV2-render_console.c b/src/LV2-render_console.c new file mode 100644 index 0000000..1d421d2 --- /dev/null +++ b/src/LV2-render_console.c @@ -0,0 +1,73 @@
+#define _XOPEN_SOURCE 500
+
+#include <stdbool.h>
+#include <stdio.h>
+#include <string.h>
+#include <unistd.h>
+
+#include "jalv_config.h"
+#include "LV2-render_internal.h"
+
+#include "lv2/lv2plug.in/ns/extensions/ui/ui.h"
+
+static int
+print_usage(const char* name, bool error)
+{
+ FILE* const os = error ? stderr : stdout;
+ fprintf(os, "Usage: %s [OPTION...] PLUGIN_URI\n", name);
+ fprintf(os, "Render a MIDI file with an LV2 plugin instrument.\n");
+ fprintf(os, " -h Display this help and exit\n");
+ fprintf(os, " -p Print control output changes to stdout\n");
+ fprintf(os, " -c SYM=VAL Set control value (e.g. \"vol=1.4\")\n");
+ fprintf(os, " -l DIR Load state from save directory\n");
+ fprintf(os, " -d Dump plugin <=> UI communication\n");
+ fprintf(os, " -b SIZE Buffer size for plugin <=> UI communication\n");
+ return error ?
1 : 0; +} + +int +jalv_init(int* argc, char*** argv, JalvOptions* opts) +{ + opts->controls = malloc(sizeof(char*)); + opts->controls[0] = NULL; + + int n_controls = 0; + int a = 1; + for (; a < *argc && (*argv)[a][0] == '-'; ++a) { + if ((*argv)[a][1] == 'h') { + return print_usage((*argv)[0], true); + } else if ((*argv)[a][1] == 's') { + opts->show_ui = true; + } else if ((*argv)[a][1] == 'p') { + opts->print_controls = true; + } else if ((*argv)[a][1] == 'l') { + if (++a == *argc) { + fprintf(stderr, "Missing argument for -l\n"); + return 1; + } + opts->load = jalv_strdup((*argv)[a]); + } else if ((*argv)[a][1] == 'b') { + if (++a == *argc) { + fprintf(stderr, "Missing argument for -b\n"); + return 1; + } + opts->buffer_size = atoi((*argv)[a]); + } else if ((*argv)[a][1] == 'c') { + if (++a == *argc) { + fprintf(stderr, "Missing argument for -c\n"); + return 1; + } + opts->controls = realloc(opts->controls, + (++n_controls + 1) * sizeof(char*)); + opts->controls[n_controls - 1] = (*argv)[a]; + opts->controls[n_controls] = NULL; + } else if ((*argv)[a][1] == 'd') { + opts->dump = true; + } else { + fprintf(stderr, "Unknown option %s\n", (*argv)[a]); + return print_usage((*argv)[0], true); + } + } + + return 0; +} diff --git a/src/LV2-render_internal.h b/src/LV2-render_internal.h new file mode 100644 index 0000000..cac20b0 --- /dev/null +++ b/src/LV2-render_internal.h @@ -0,0 +1,280 @@ + +#ifndef JALV_INTERNAL_H +#define JALV_INTERNAL_H + +#include <stdlib.h> +#include <string.h> + +#include <jack/jack.h> +#include <jack/ringbuffer.h> + +#include "lilv/lilv.h" +#include "serd/serd.h" + +#include "lv2/lv2plug.in/ns/ext/atom/atom.h" +#include "lv2/lv2plug.in/ns/ext/atom/forge.h" +#include "lv2/lv2plug.in/ns/ext/log/log.h" +#include "lv2/lv2plug.in/ns/ext/midi/midi.h" +#include "lv2/lv2plug.in/ns/ext/resize-port/resize-port.h" +#include "lv2/lv2plug.in/ns/ext/state/state.h" +#include "lv2/lv2plug.in/ns/ext/urid/urid.h" +#include "lv2/lv2plug.in/ns/ext/worker/worker.h" + +#include "zix/sem.h" +#include "zix/thread.h" + +#include "sratom/sratom.h" + +#include "lv2_evbuf.h" +#include "symap.h" + +#ifdef __cplusplus +extern "C" { +#endif + +enum PortFlow { + FLOW_UNKNOWN, + FLOW_INPUT, + FLOW_OUTPUT +}; + +enum PortType { + TYPE_UNKNOWN, + TYPE_CONTROL, + TYPE_AUDIO, + TYPE_EVENT +}; + +struct Port { + const LilvPort* lilv_port; + enum PortType type; + enum PortFlow flow; + jack_port_t* jack_port; ///< For audio/MIDI ports, otherwise NULL + LV2_Evbuf* evbuf; ///< For MIDI ports, otherwise NULL + void* widget; ///< Control widget, if applicable + size_t buf_size; ///< Custom buffer size, or 0 + uint32_t index; ///< Port index + float control; ///< For control ports, otherwise 0.0f + bool old_api; ///< True for event, false for atom +}; + +/** + Control change event, sent through ring buffers for UI updates. 
+*/ +typedef struct { + uint32_t index; + uint32_t protocol; + uint32_t size; + uint8_t body[]; +} ControlChange; + +typedef struct { + char* uuid; ///< Session UUID + char* load; ///< Path for state to load + char** controls; ///< Control values + uint32_t buffer_size; ///< Plugin <= >UI communication buffer size + double update_rate; ///< UI update rate in Hz + int dump; ///< Dump communication iff true + int generic_ui; ///< Use generic UI iff true + int show_hidden; ///< Show controls for notOnGUI ports + int no_menu; ///< Hide menu iff true + int show_ui; ///< Show non-embedded UI + int print_controls; ///< Print control changes to stdout +} JalvOptions; + +typedef struct { + LV2_URID atom_Float; + LV2_URID atom_Int; + LV2_URID atom_eventTransfer; + LV2_URID bufsz_maxBlockLength; + LV2_URID bufsz_minBlockLength; + LV2_URID bufsz_sequenceSize; + LV2_URID log_Trace; + LV2_URID midi_MidiEvent; + LV2_URID param_sampleRate; + LV2_URID patch_Set; + LV2_URID patch_property; + LV2_URID patch_value; + LV2_URID time_Position; + LV2_URID time_bar; + LV2_URID time_barBeat; + LV2_URID time_beatUnit; + LV2_URID time_beatsPerBar; + LV2_URID time_beatsPerMinute; + LV2_URID time_frame; + LV2_URID time_speed; + LV2_URID ui_updateRate; +} JalvURIDs; + +typedef struct { + LilvNode* atom_AtomPort; + LilvNode* atom_Chunk; + LilvNode* atom_Sequence; + LilvNode* ev_EventPort; + LilvNode* lv2_AudioPort; + LilvNode* lv2_ControlPort; + LilvNode* lv2_InputPort; + LilvNode* lv2_OutputPort; + LilvNode* lv2_connectionOptional; + LilvNode* lv2_control; + LilvNode* lv2_name; + LilvNode* midi_MidiEvent; + LilvNode* pg_group; + LilvNode* pset_Preset; + LilvNode* rdfs_label; + LilvNode* rsz_minimumSize; + LilvNode* work_interface; + LilvNode* work_schedule; + LilvNode* end; ///< NULL terminator for easy freeing of entire structure +} JalvNodes; + +typedef enum { + JALV_RUNNING, + JALV_PAUSE_REQUESTED, + JALV_PAUSED +} JalvPlayState; + +typedef struct { + jack_ringbuffer_t* requests; ///< Requests to the worker + jack_ringbuffer_t* responses; ///< Responses from the worker + void* response; ///< Worker response buffer + ZixSem sem; ///< Worker semaphore + ZixThread thread; ///< Worker thread + const LV2_Worker_Interface* iface; ///< Plugin worker interface +} JalvWorker; + +typedef struct { + JalvOptions opts; ///< Command-line options + JalvURIDs urids; ///< URIDs + JalvNodes nodes; ///< Nodes + LV2_Atom_Forge forge; ///< Atom forge + const char* prog_name; ///< Program name (argv[0]) + LilvWorld* world; ///< Lilv World + LV2_URID_Map map; ///< URI => Int map + LV2_URID_Unmap unmap; ///< Int => URI map + Sratom* sratom; ///< Atom serialiser + Sratom* ui_sratom; ///< Atom serialiser for UI thread + Symap* symap; ///< URI map + ZixSem symap_lock; ///< Lock for URI map + jack_client_t* jack_client; ///< Jack client + jack_ringbuffer_t* ui_events; ///< Port events from UI + jack_ringbuffer_t* plugin_events; ///< Port events from plugin + void* ui_event_buf; ///< Buffer for reading UI port events + JalvWorker worker; ///< Worker thread implementation + ZixSem* done; ///< Exit semaphore + ZixSem paused; ///< Paused signal from process thread + JalvPlayState play_state; ///< Current play state + char* temp_dir; ///< Temporary plugin state directory + char* save_dir; ///< Plugin save directory + const LilvPlugin* plugin; ///< Plugin class (RDF data) + LilvUIs* uis; ///< All plugin UIs (RDF data) + const LilvUI* ui; ///< Plugin UI (RDF data) + const LilvNode* ui_type; ///< Plugin UI type (unwrapped) + LilvInstance* instance; 
///< Plugin instance (shared library) + void* window; ///< Window (if applicable) + struct Port* ports; ///< Port array of size num_ports + uint32_t block_length; ///< Jack buffer size (block length) + size_t midi_buf_size; ///< Size of MIDI port buffers + uint32_t control_in; ///< Index of control input port + uint32_t num_ports; ///< Size of the two following arrays: + uint32_t longest_sym; ///< Longest port symbol + float ui_update_hz; ///< Frequency of UI updates + jack_nframes_t sample_rate; ///< Sample rate + jack_nframes_t event_delta_t; ///< Frames since last update sent to UI + uint32_t midi_event_id; ///< MIDI event class ID in event context + jack_nframes_t position; ///< Transport position in frames + float bpm; ///< Transport tempo in beats per minute + bool rolling; ///< Transport speed (0=stop, 1=play) + bool buf_size_set; ///< True iff buffer size callback fired + bool exit; ///< True iff execution is finished +} Jalv; + +int +jalv_init(int* argc, char*** argv, JalvOptions* opts); + +void +jalv_create_ports(Jalv* jalv); + +struct Port* +jalv_port_by_symbol(Jalv* jalv, const char* sym); + +typedef int (*PresetSink)(Jalv* jalv, + const LilvNode* node, + const LilvNode* title, + void* data); + +int +jalv_load_presets(Jalv* jalv, PresetSink sink, void* data); + +int +jalv_unload_presets(Jalv* jalv); + +int +jalv_apply_preset(Jalv* jalv, const LilvNode* preset); + +int +jalv_save_preset(Jalv* jalv, + const char* dir, + const char* uri, + const char* label, + const char* filename); + +void +jalv_save(Jalv* jalv, const char* dir); + +void +jalv_save_port_values(Jalv* jalv, + SerdWriter* writer, + const SerdNode* subject); +char* +jalv_make_path(LV2_State_Make_Path_Handle handle, + const char* path); + +void +jalv_apply_state(Jalv* jalv, LilvState* state); + +char* +atom_to_turtle(LV2_URID_Unmap* unmap, + const SerdNode* subject, + const SerdNode* predicate, + const LV2_Atom* atom); + +static inline char* +jalv_strdup(const char* str) +{ + const size_t len = strlen(str); + char* copy = (char*)malloc(len + 1); + memcpy(copy, str, len + 1); + return copy; +} + +static inline char* +jalv_strjoin(const char* a, const char* b) +{ + const size_t a_len = strlen(a); + const size_t b_len = strlen(b); + char* const out = (char*)malloc(a_len + b_len + 1); + + memcpy(out, a, a_len); + memcpy(out + a_len, b, b_len); + out[a_len + b_len] = '\0'; + + return out; +} + +int +jalv_printf(LV2_Log_Handle handle, + LV2_URID type, + const char* fmt, ...); + +int +jalv_vprintf(LV2_Log_Handle handle, + LV2_URID type, + const char* fmt, + va_list ap); + +#ifdef __cplusplus +} // extern "C" +#endif + +#endif // JALV_INTERNAL_H diff --git a/src/log.c b/src/log.c new file mode 100644 index 0000000..eed3298 --- /dev/null +++ b/src/log.c @@ -0,0 +1,43 @@ +/* + Copyright 2007-2012 David Robillard <http://drobilla.net> + + Permission to use, copy, modify, and/or distribute this software for any + purpose with or without fee is hereby granted, provided that the above + copyright notice and this permission notice appear in all copies. + + THIS SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +*/ + +#include "LV2-render_internal.h" + +int +jalv_printf(LV2_Log_Handle handle, + LV2_URID type, + const char* fmt, ...) +{ + va_list args; + va_start(args, fmt); + const int ret = jalv_vprintf(handle, type, fmt, args); + va_end(args); + return ret; +} + +int +jalv_vprintf(LV2_Log_Handle handle, + LV2_URID type, + const char* fmt, + va_list ap) +{ + // TODO: Lock + Jalv* jalv = (Jalv*)handle; + if (type == jalv->urids.log_Trace && !jalv->opts.dump) { + return 0; + } + return vfprintf(stderr, fmt, ap); +} diff --git a/src/lv2_evbuf.c b/src/lv2_evbuf.c new file mode 100644 index 0000000..e8c9b60 --- /dev/null +++ b/src/lv2_evbuf.c @@ -0,0 +1,276 @@ +/* + Copyright 2008-2012 David Robillard <http://drobilla.net> + + Permission to use, copy, modify, and/or distribute this software for any + purpose with or without fee is hereby granted, provided that the above + copyright notice and this permission notice appear in all copies. + + THIS SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +*/ + +#include <assert.h> +#include <stdlib.h> +#include <string.h> + +#include "lv2/lv2plug.in/ns/ext/atom/atom.h" +#include "lv2/lv2plug.in/ns/ext/event/event.h" + +#include "lv2_evbuf.h" + +struct LV2_Evbuf_Impl { + LV2_Evbuf_Type type; + uint32_t capacity; + uint32_t atom_Chunk; + uint32_t atom_Sequence; + union { + LV2_Event_Buffer event; + LV2_Atom_Sequence atom; + } buf; +}; + +static inline uint32_t +lv2_evbuf_pad_size(uint32_t size) +{ + return (size + 7) & (~7); +} + +LV2_Evbuf* +lv2_evbuf_new(uint32_t capacity, + LV2_Evbuf_Type type, + uint32_t atom_Chunk, + uint32_t atom_Sequence) +{ + // FIXME: memory must be 64-bit aligned + LV2_Evbuf* evbuf = (LV2_Evbuf*)malloc( + sizeof(LV2_Evbuf) + sizeof(LV2_Atom_Sequence) + capacity); + evbuf->capacity = capacity; + evbuf->atom_Chunk = atom_Chunk; + evbuf->atom_Sequence = atom_Sequence; + lv2_evbuf_set_type(evbuf, type); + lv2_evbuf_reset(evbuf, true); + return evbuf; +} + +void +lv2_evbuf_free(LV2_Evbuf* evbuf) +{ + free(evbuf); +} + +void +lv2_evbuf_set_type(LV2_Evbuf* evbuf, LV2_Evbuf_Type type) +{ + evbuf->type = type; + switch (type) { + case LV2_EVBUF_EVENT: + evbuf->buf.event.data = (uint8_t*)(evbuf + 1); + evbuf->buf.event.capacity = evbuf->capacity; + break; + case LV2_EVBUF_ATOM: + break; + } + lv2_evbuf_reset(evbuf, true); +} + +void +lv2_evbuf_reset(LV2_Evbuf* evbuf, bool input) +{ + switch (evbuf->type) { + case LV2_EVBUF_EVENT: + evbuf->buf.event.header_size = sizeof(LV2_Event_Buffer); + evbuf->buf.event.stamp_type = LV2_EVENT_AUDIO_STAMP; + evbuf->buf.event.event_count = 0; + evbuf->buf.event.size = 0; + break; + case LV2_EVBUF_ATOM: + if (input) { + evbuf->buf.atom.atom.size = sizeof(LV2_Atom_Sequence_Body); + evbuf->buf.atom.atom.type = evbuf->atom_Sequence; + } else { + 
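+ /* Output buffer: expose the full capacity as an atom:Chunk for the plugin to write into */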
evbuf->buf.atom.atom.size = evbuf->capacity; + evbuf->buf.atom.atom.type = evbuf->atom_Chunk; + } + } +} + +uint32_t +lv2_evbuf_get_size(LV2_Evbuf* evbuf) +{ + switch (evbuf->type) { + case LV2_EVBUF_EVENT: + return evbuf->buf.event.size; + case LV2_EVBUF_ATOM: + assert(evbuf->buf.atom.atom.type != evbuf->atom_Sequence + || evbuf->buf.atom.atom.size >= sizeof(LV2_Atom_Sequence_Body)); + return evbuf->buf.atom.atom.type == evbuf->atom_Sequence + ? evbuf->buf.atom.atom.size - sizeof(LV2_Atom_Sequence_Body) + : 0; + } + return 0; +} + +void* +lv2_evbuf_get_buffer(LV2_Evbuf* evbuf) +{ + switch (evbuf->type) { + case LV2_EVBUF_EVENT: + return &evbuf->buf.event; + case LV2_EVBUF_ATOM: + return &evbuf->buf.atom; + } + return NULL; +} + +LV2_Evbuf_Iterator +lv2_evbuf_begin(LV2_Evbuf* evbuf) +{ + LV2_Evbuf_Iterator iter = { evbuf, 0 }; + return iter; +} + +LV2_Evbuf_Iterator +lv2_evbuf_end(LV2_Evbuf* evbuf) +{ + const uint32_t size = lv2_evbuf_get_size(evbuf); + const LV2_Evbuf_Iterator iter = { evbuf, lv2_evbuf_pad_size(size) }; + return iter; +} + +bool +lv2_evbuf_is_valid(LV2_Evbuf_Iterator iter) +{ + return iter.offset < lv2_evbuf_get_size(iter.evbuf); +} + +LV2_Evbuf_Iterator +lv2_evbuf_next(LV2_Evbuf_Iterator iter) +{ + if (!lv2_evbuf_is_valid(iter)) { + return iter; + } + + LV2_Evbuf* evbuf = iter.evbuf; + uint32_t offset = iter.offset; + uint32_t size; + switch (evbuf->type) { + case LV2_EVBUF_EVENT: + size = ((LV2_Event*)(evbuf->buf.event.data + offset))->size; + offset += lv2_evbuf_pad_size(sizeof(LV2_Event) + size); + break; + case LV2_EVBUF_ATOM: + size = ((LV2_Atom_Event*) + ((char*)LV2_ATOM_CONTENTS(LV2_Atom_Sequence, &evbuf->buf.atom) + + offset))->body.size; + offset += lv2_evbuf_pad_size(sizeof(LV2_Atom_Event) + size); + break; + } + + LV2_Evbuf_Iterator next = { evbuf, offset }; + return next; +} + +bool +lv2_evbuf_get(LV2_Evbuf_Iterator iter, + uint32_t* frames, + uint32_t* subframes, + uint32_t* type, + uint32_t* size, + uint8_t** data) +{ + *frames = *subframes = *type = *size = 0; + *data = NULL; + + if (!lv2_evbuf_is_valid(iter)) { + return false; + } + + LV2_Event_Buffer* ebuf; + LV2_Event* ev; + LV2_Atom_Sequence* aseq; + LV2_Atom_Event* aev; + switch (iter.evbuf->type) { + case LV2_EVBUF_EVENT: + ebuf = &iter.evbuf->buf.event; + ev = (LV2_Event*)((char*)ebuf->data + iter.offset); + *frames = ev->frames; + *subframes = ev->subframes; + *type = ev->type; + *size = ev->size; + *data = (uint8_t*)ev + sizeof(LV2_Event); + break; + case LV2_EVBUF_ATOM: + aseq = (LV2_Atom_Sequence*)&iter.evbuf->buf.atom; + aev = (LV2_Atom_Event*)( + (char*)LV2_ATOM_CONTENTS(LV2_Atom_Sequence, aseq) + + iter.offset); + *frames = aev->time.frames; + *subframes = 0; + *type = aev->body.type; + *size = aev->body.size; + *data = LV2_ATOM_BODY(&aev->body); + break; + } + + return true; +} + +bool +lv2_evbuf_write(LV2_Evbuf_Iterator* iter, + uint32_t frames, + uint32_t subframes, + uint32_t type, + uint32_t size, + const uint8_t* data) +{ + LV2_Event_Buffer* ebuf; + LV2_Event* ev; + LV2_Atom_Sequence* aseq; + LV2_Atom_Event* aev; + switch (iter->evbuf->type) { + case LV2_EVBUF_EVENT: + ebuf = &iter->evbuf->buf.event; + if (ebuf->capacity - ebuf->size < sizeof(LV2_Event) + size) { + return false; + } + + ev = (LV2_Event*)(ebuf->data + iter->offset); + ev->frames = frames; + ev->subframes = subframes; + ev->type = type; + ev->size = size; + memcpy((uint8_t*)ev + sizeof(LV2_Event), data, size); + + size = lv2_evbuf_pad_size(sizeof(LV2_Event) + size); + ebuf->size += size; + ebuf->event_count += 1; + 
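+ /* size now holds the padded event size; advance the write iterator past it */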
iter->offset += size; + break; + case LV2_EVBUF_ATOM: + aseq = (LV2_Atom_Sequence*)&iter->evbuf->buf.atom; + if (iter->evbuf->capacity - sizeof(LV2_Atom) - aseq->atom.size + < sizeof(LV2_Atom_Event) + size) { + return false; + } + + aev = (LV2_Atom_Event*)( + (char*)LV2_ATOM_CONTENTS(LV2_Atom_Sequence, aseq) + + iter->offset); + aev->time.frames = frames; + aev->body.type = type; + aev->body.size = size; + memcpy(LV2_ATOM_BODY(&aev->body), data, size); + + size = lv2_evbuf_pad_size(sizeof(LV2_Atom_Event) + size); + aseq->atom.size += size; + iter->offset += size; + break; + } + + return true; +} diff --git a/src/lv2_evbuf.h b/src/lv2_evbuf.h new file mode 100644 index 0000000..6dc4e69 --- /dev/null +++ b/src/lv2_evbuf.h @@ -0,0 +1,149 @@ +#ifndef LV2_EVBUF_H +#define LV2_EVBUF_H + +#include <stdint.h> + +#ifdef __cplusplus +extern "C" { +#else +#include <stdbool.h> +#endif + +/** + Format of actual buffer. +*/ +typedef enum { + /** + An (old) ev:EventBuffer (LV2_Event_Buffer). + */ + LV2_EVBUF_EVENT, + + /** + A (new) atom:Sequence (LV2_Atom_Sequence). + */ + LV2_EVBUF_ATOM +} LV2_Evbuf_Type; + +/** + An abstract/opaque LV2 event buffer. +*/ +typedef struct LV2_Evbuf_Impl LV2_Evbuf; + +/** + An iterator over an LV2_Evbuf. +*/ +typedef struct { + LV2_Evbuf* evbuf; + uint32_t offset; +} LV2_Evbuf_Iterator; + +/** + Allocate a new, empty event buffer. + URIDs for atom:Chunk and atom:Sequence must be passed for LV2_EVBUF_ATOM. +*/ +LV2_Evbuf* +lv2_evbuf_new(uint32_t capacity, + LV2_Evbuf_Type type, + uint32_t atom_Chunk, + uint32_t atom_Sequence); + +/** + Free an event buffer allocated with lv2_evbuf_new. +*/ +void +lv2_evbuf_free(LV2_Evbuf* evbuf); + +/** + Reset and change the type of an existing event buffer. + URIDs for atom:Chunk and atom:Sequence must be passed for LV2_EVBUF_ATOM. +*/ +void +lv2_evbuf_set_type(LV2_Evbuf* evbuf, LV2_Evbuf_Type type); + +/** + Clear and initialize an existing event buffer. + The contents of buf are ignored entirely and overwritten, except capacity + which is unmodified. + If input is false and this is an atom buffer, the buffer will be prepared + for writing by the plugin. This MUST be called before every run cycle. +*/ +void +lv2_evbuf_reset(LV2_Evbuf* evbuf, bool input); + +/** + Return the total padded size of the events stored in the buffer. +*/ +uint32_t +lv2_evbuf_get_size(LV2_Evbuf* evbuf); + +/** + Return the actual buffer implementation. + The format of the buffer returned depends on the buffer type. +*/ +void* +lv2_evbuf_get_buffer(LV2_Evbuf* evbuf); + +/** + Return an iterator to the start of `evbuf`. +*/ +LV2_Evbuf_Iterator +lv2_evbuf_begin(LV2_Evbuf* evbuf); + +/** + Return an iterator to the end of `evbuf`. +*/ +LV2_Evbuf_Iterator +lv2_evbuf_end(LV2_Evbuf* evbuf); + +/** + Check if `iter` is valid. + @return True if `iter` is valid, otherwise false (past end of buffer) +*/ +bool +lv2_evbuf_is_valid(LV2_Evbuf_Iterator iter); + +/** + Advance `iter` forward one event. + `iter` must be valid. + @return True if `iter` is valid, otherwise false (reached end of buffer) +*/ +LV2_Evbuf_Iterator +lv2_evbuf_next(LV2_Evbuf_Iterator iter); + +/** + Dereference an event iterator (i.e. get the event currently pointed to). + `iter` must be valid. + `type` Set to the type of the event. + `size` Set to the size of the event. + `data` Set to the contents of the event. + @return True on success. 
+*/
+bool
+lv2_evbuf_get(LV2_Evbuf_Iterator iter,
+ uint32_t* frames,
+ uint32_t* subframes,
+ uint32_t* type,
+ uint32_t* size,
+ uint8_t** data);
+
+/**
+ Write an event at `iter`.
+ The event (if any) pointed to by `iter` will be overwritten, and `iter`
+ incremented to point to the following event (i.e. several calls to this
+ function can be done in sequence without twiddling iter in-between).
+ `frames` is the event time as a frame (sample) offset within the current
+ buffer; since this host writes each event at the start of a processing
+ cycle it passes 0. `subframes` is only stored for old-style event
+ buffers and is normally 0 (atom buffers ignore it).
+ @return True if event was written, otherwise false (buffer is full).
+*/
+bool
+lv2_evbuf_write(LV2_Evbuf_Iterator* iter,
+ uint32_t frames,
+ uint32_t subframes,
+ uint32_t type,
+ uint32_t size,
+ const uint8_t* data);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* LV2_EVBUF_H */
diff --git a/src/midi/Makefile b/src/midi/Makefile new file mode 100644 index 0000000..e65bef3 --- /dev/null +++ b/src/midi/Makefile @@ -0,0 +1,645 @@
+# Makefile.in generated by automake 1.14.1 from Makefile.am.
+# src/midi/Makefile. Generated from Makefile.in by configure.
+
+# Copyright (C) 1994-2013 Free Software Foundation, Inc.
+
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+
+
+
+am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)'
+am__make_running_with_option = \
+ case $${target_option-} in \
+ ?)
;; \ + *) echo "am__make_running_with_option: internal error: invalid" \ + "target option '$${target_option-}' specified" >&2; \ + exit 1;; \ + esac; \ + has_opt=no; \ + sane_makeflags=$$MAKEFLAGS; \ + if $(am__is_gnu_make); then \ + sane_makeflags=$$MFLAGS; \ + else \ + case $$MAKEFLAGS in \ + *\\[\ \ ]*) \ + bs=\\; \ + sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ + | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ + esac; \ + fi; \ + skip_next=no; \ + strip_trailopt () \ + { \ + flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ + }; \ + for flg in $$sane_makeflags; do \ + test $$skip_next = yes && { skip_next=no; continue; }; \ + case $$flg in \ + *=*|--*) continue;; \ + -*I) strip_trailopt 'I'; skip_next=yes;; \ + -*I?*) strip_trailopt 'I';; \ + -*O) strip_trailopt 'O'; skip_next=yes;; \ + -*O?*) strip_trailopt 'O';; \ + -*l) strip_trailopt 'l'; skip_next=yes;; \ + -*l?*) strip_trailopt 'l';; \ + -[dEDm]) skip_next=yes;; \ + -[JT]) skip_next=yes;; \ + esac; \ + case $$flg in \ + *$$target_option*) has_opt=yes; break;; \ + esac; \ + done; \ + test $$has_opt = yes +am__make_dryrun = (target_option=n; $(am__make_running_with_option)) +am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) +pkgdatadir = $(datadir)/cli-dssi-host +pkgincludedir = $(includedir)/cli-dssi-host +pkglibdir = $(libdir)/cli-dssi-host +pkglibexecdir = $(libexecdir)/cli-dssi-host +am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd +install_sh_DATA = $(install_sh) -c -m 644 +install_sh_PROGRAM = $(install_sh) -c +install_sh_SCRIPT = $(install_sh) -c +INSTALL_HEADER = $(INSTALL_DATA) +transform = $(program_transform_name) +NORMAL_INSTALL = : +PRE_INSTALL = : +POST_INSTALL = : +NORMAL_UNINSTALL = : +PRE_UNINSTALL = : +POST_UNINSTALL = : +build_triplet = x86_64-unknown-linux-gnu +host_triplet = x86_64-unknown-linux-gnu +bin_PROGRAMS = test_midi_loader$(EXEEXT) +subdir = src/midi +DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ + $(top_srcdir)/mkinstalldirs $(top_srcdir)/depcomp +ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 +am__aclocal_m4_deps = $(top_srcdir)/configure.ac +am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ + $(ACLOCAL_M4) +mkinstalldirs = $(SHELL) $(top_srcdir)/mkinstalldirs +CONFIG_CLEAN_FILES = +CONFIG_CLEAN_VPATH_FILES = +LTLIBRARIES = $(noinst_LTLIBRARIES) +libfluidmidi_la_LIBADD = +am_libfluidmidi_la_OBJECTS = fluid_midi.lo fluid_list.lo \ + midi_loader.lo +libfluidmidi_la_OBJECTS = $(am_libfluidmidi_la_OBJECTS) +AM_V_lt = $(am__v_lt_$(V)) +am__v_lt_ = $(am__v_lt_$(AM_DEFAULT_VERBOSITY)) +am__v_lt_0 = --silent +am__v_lt_1 = +am__installdirs = "$(DESTDIR)$(bindir)" +PROGRAMS = $(bin_PROGRAMS) +am_test_midi_loader_OBJECTS = test_midi_loader.$(OBJEXT) +test_midi_loader_OBJECTS = $(am_test_midi_loader_OBJECTS) +test_midi_loader_DEPENDENCIES = libfluidmidi.la +AM_V_P = $(am__v_P_$(V)) +am__v_P_ = $(am__v_P_$(AM_DEFAULT_VERBOSITY)) +am__v_P_0 = false +am__v_P_1 = : +AM_V_GEN = $(am__v_GEN_$(V)) +am__v_GEN_ = $(am__v_GEN_$(AM_DEFAULT_VERBOSITY)) +am__v_GEN_0 = @echo " GEN " $@; +am__v_GEN_1 = +AM_V_at = $(am__v_at_$(V)) +am__v_at_ = $(am__v_at_$(AM_DEFAULT_VERBOSITY)) +am__v_at_0 = @ +am__v_at_1 = +DEFAULT_INCLUDES = -I. 
+depcomp = $(SHELL) $(top_srcdir)/depcomp +am__depfiles_maybe = depfiles +am__mv = mv -f +COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ + $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) +LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ + $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \ + $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ + $(AM_CFLAGS) $(CFLAGS) +AM_V_CC = $(am__v_CC_$(V)) +am__v_CC_ = $(am__v_CC_$(AM_DEFAULT_VERBOSITY)) +am__v_CC_0 = @echo " CC " $@; +am__v_CC_1 = +CCLD = $(CC) +LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ + $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ + $(AM_LDFLAGS) $(LDFLAGS) -o $@ +AM_V_CCLD = $(am__v_CCLD_$(V)) +am__v_CCLD_ = $(am__v_CCLD_$(AM_DEFAULT_VERBOSITY)) +am__v_CCLD_0 = @echo " CCLD " $@; +am__v_CCLD_1 = +SOURCES = $(libfluidmidi_la_SOURCES) $(test_midi_loader_SOURCES) +DIST_SOURCES = $(libfluidmidi_la_SOURCES) $(test_midi_loader_SOURCES) +am__can_run_installinfo = \ + case $$AM_UPDATE_INFO_DIR in \ + n|no|NO) false;; \ + *) (install-info --version) >/dev/null 2>&1;; \ + esac +am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) +# Read a list of newline-separated strings from the standard input, +# and print each of them once, without duplicates. Input order is +# *not* preserved. +am__uniquify_input = $(AWK) '\ + BEGIN { nonempty = 0; } \ + { items[$$0] = 1; nonempty = 1; } \ + END { if (nonempty) { for (i in items) print i; }; } \ +' +# Make sure the list of sources is unique. This is necessary because, +# e.g., the same source file might be shared among _SOURCES variables +# for different programs/libraries. +am__define_uniq_tagged_files = \ + list='$(am__tagged_files)'; \ + unique=`for i in $$list; do \ + if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ + done | $(am__uniquify_input)` +ETAGS = etags +CTAGS = ctags +DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) +ACLOCAL = ${SHELL} /home/pepper/DSSI/dssi-render/missing aclocal-1.14 +ALSA_CFLAGS = -I/usr/include/alsa +ALSA_LIBS = -lasound +AMTAR = $${TAR-tar} +AM_DEFAULT_VERBOSITY = 1 +AR = ar +AUTOCONF = ${SHELL} /home/pepper/DSSI/dssi-render/missing autoconf +AUTOHEADER = ${SHELL} /home/pepper/DSSI/dssi-render/missing autoheader +AUTOMAKE = ${SHELL} /home/pepper/DSSI/dssi-render/missing automake-1.14 +AWK = gawk +CC = gcc +CCDEPMODE = depmode=gcc3 +CFLAGS = -g -O2 +CPP = gcc -E +CPPFLAGS = +CYGPATH_W = echo +DEFS = -DPACKAGE_NAME=\"\" -DPACKAGE_TARNAME=\"\" -DPACKAGE_VERSION=\"\" -DPACKAGE_STRING=\"\" -DPACKAGE_BUGREPORT=\"\" -DPACKAGE_URL=\"\" -DSTDC_HEADERS=1 -DHAVE_SYS_TYPES_H=1 -DHAVE_SYS_STAT_H=1 -DHAVE_STDLIB_H=1 -DHAVE_STRING_H=1 -DHAVE_MEMORY_H=1 -DHAVE_STRINGS_H=1 -DHAVE_INTTYPES_H=1 -DHAVE_STDINT_H=1 -DHAVE_UNISTD_H=1 -DHAVE_DLFCN_H=1 -DLT_OBJDIR=\".libs/\" -DPACKAGE=\"cli-dssi-host\" -DVERSION=\"0.1.3\" +DEPDIR = .deps +DLLTOOL = false +DSSI_CFLAGS = +DSSI_LIBS = +DSYMUTIL = +DUMPBIN = +ECHO_C = +ECHO_N = -n +ECHO_T = +EGREP = /usr/bin/grep -E +EXEEXT = +FGREP = /usr/bin/grep -F +GREP = /usr/bin/grep +INSTALL = /usr/bin/install -c +INSTALL_DATA = ${INSTALL} -m 644 +INSTALL_PROGRAM = ${INSTALL} +INSTALL_SCRIPT = ${INSTALL} +INSTALL_STRIP_PROGRAM = $(install_sh) -c -s +LD = /usr/bin/ld -m elf_x86_64 +LDFLAGS = +LIBOBJS = +LIBS = +LIBTOOL = $(SHELL) $(top_builddir)/libtool +LIPO = +LN_S = ln -s +LTLIBOBJS = +MAKEINFO = ${SHELL} /home/pepper/DSSI/dssi-render/missing makeinfo +MANIFEST_TOOL = : +MKDIR_P = /usr/bin/mkdir -p +NM = /usr/bin/nm -B +NMEDIT = +OBJDUMP = 
objdump +OBJEXT = o +OTOOL = +OTOOL64 = +PACKAGE = cli-dssi-host +PACKAGE_BUGREPORT = +PACKAGE_NAME = +PACKAGE_STRING = +PACKAGE_TARNAME = +PACKAGE_URL = +PACKAGE_VERSION = +PATH_SEPARATOR = : +PKG_CONFIG = /usr/bin/pkg-config +PKG_CONFIG_LIBDIR = +PKG_CONFIG_PATH = +RANLIB = ranlib +SED = /usr/bin/sed +SET_MAKE = +SHELL = /bin/sh +SNDFILE_CFLAGS = +SNDFILE_LIBS = -lsndfile +STRIP = strip +VERSION = 0.1.3 +abs_builddir = /home/pepper/DSSI/dssi-render/src/midi +abs_srcdir = /home/pepper/DSSI/dssi-render/src/midi +abs_top_builddir = /home/pepper/DSSI/dssi-render +abs_top_srcdir = /home/pepper/DSSI/dssi-render +ac_ct_AR = ar +ac_ct_CC = gcc +ac_ct_DUMPBIN = +am__include = include +am__leading_dot = . +am__quote = +am__tar = $${TAR-tar} chof - "$$tardir" +am__untar = $${TAR-tar} xf - +bindir = ${exec_prefix}/bin +build = x86_64-unknown-linux-gnu +build_alias = +build_cpu = x86_64 +build_os = linux-gnu +build_vendor = unknown +builddir = . +datadir = ${datarootdir} +datarootdir = ${prefix}/share +docdir = ${datarootdir}/doc/${PACKAGE} +dvidir = ${docdir} +exec_prefix = ${prefix} +host = x86_64-unknown-linux-gnu +host_alias = +host_cpu = x86_64 +host_os = linux-gnu +host_vendor = unknown +htmldir = ${docdir} +includedir = ${prefix}/include +infodir = ${datarootdir}/info +install_sh = ${SHELL} /home/pepper/DSSI/dssi-render/install-sh +libdir = ${exec_prefix}/lib +libexecdir = ${exec_prefix}/libexec +localedir = ${datarootdir}/locale +localstatedir = ${prefix}/var +mandir = ${datarootdir}/man +mkdir_p = $(MKDIR_P) +oldincludedir = /usr/include +pdfdir = ${docdir} +prefix = /usr/local +program_transform_name = s,x,x, +psdir = ${docdir} +sbindir = ${exec_prefix}/sbin +sharedstatedir = ${prefix}/com +srcdir = . +sysconfdir = ${prefix}/etc +target_alias = +top_build_prefix = ../../ +top_builddir = ../.. +top_srcdir = ../.. +noinst_LTLIBRARIES = libfluidmidi.la +libfluidmidi_la_SOURCES = fluid_midi.c fluid_list.c midi_loader.c +test_midi_loader_SOURCES = test_midi_loader.c +test_midi_loader_LDADD = -lm -ldl libfluidmidi.la +all: all-am + +.SUFFIXES: +.SUFFIXES: .c .lo .o .obj +$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) + @for dep in $?; do \ + case '$(am__configure_deps)' in \ + *$$dep*) \ + ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ + && { if test -f $@; then exit 0; else break; fi; }; \ + exit 1;; \ + esac; \ + done; \ + echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu src/midi/Makefile'; \ + $(am__cd) $(top_srcdir) && \ + $(AUTOMAKE) --gnu src/midi/Makefile +.PRECIOUS: Makefile +Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status + @case '$?' 
in \ + *config.status*) \ + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ + *) \ + echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ + cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ + esac; + +$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh + +$(top_srcdir)/configure: $(am__configure_deps) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(ACLOCAL_M4): $(am__aclocal_m4_deps) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(am__aclocal_m4_deps): + +clean-noinstLTLIBRARIES: + -test -z "$(noinst_LTLIBRARIES)" || rm -f $(noinst_LTLIBRARIES) + @list='$(noinst_LTLIBRARIES)'; \ + locs=`for p in $$list; do echo $$p; done | \ + sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \ + sort -u`; \ + test -z "$$locs" || { \ + echo rm -f $${locs}; \ + rm -f $${locs}; \ + } + +libfluidmidi.la: $(libfluidmidi_la_OBJECTS) $(libfluidmidi_la_DEPENDENCIES) $(EXTRA_libfluidmidi_la_DEPENDENCIES) + $(AM_V_CCLD)$(LINK) $(libfluidmidi_la_OBJECTS) $(libfluidmidi_la_LIBADD) $(LIBS) +install-binPROGRAMS: $(bin_PROGRAMS) + @$(NORMAL_INSTALL) + @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ + if test -n "$$list"; then \ + echo " $(MKDIR_P) '$(DESTDIR)$(bindir)'"; \ + $(MKDIR_P) "$(DESTDIR)$(bindir)" || exit 1; \ + fi; \ + for p in $$list; do echo "$$p $$p"; done | \ + sed 's/$(EXEEXT)$$//' | \ + while read p p1; do if test -f $$p \ + || test -f $$p1 \ + ; then echo "$$p"; echo "$$p"; else :; fi; \ + done | \ + sed -e 'p;s,.*/,,;n;h' \ + -e 's|.*|.|' \ + -e 'p;x;s,.*/,,;s/$(EXEEXT)$$//;$(transform);s/$$/$(EXEEXT)/' | \ + sed 'N;N;N;s,\n, ,g' | \ + $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1 } \ + { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ + if ($$2 == $$4) files[d] = files[d] " " $$1; \ + else { print "f", $$3 "/" $$4, $$1; } } \ + END { for (d in files) print "f", d, files[d] }' | \ + while read type dir files; do \ + if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ + test -z "$$files" || { \ + echo " $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files '$(DESTDIR)$(bindir)$$dir'"; \ + $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files "$(DESTDIR)$(bindir)$$dir" || exit $$?; \ + } \ + ; done + +uninstall-binPROGRAMS: + @$(NORMAL_UNINSTALL) + @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ + files=`for p in $$list; do echo "$$p"; done | \ + sed -e 'h;s,^.*/,,;s/$(EXEEXT)$$//;$(transform)' \ + -e 's/$$/$(EXEEXT)/' \ + `; \ + test -n "$$list" || exit 0; \ + echo " ( cd '$(DESTDIR)$(bindir)' && rm -f" $$files ")"; \ + cd "$(DESTDIR)$(bindir)" && rm -f $$files + +clean-binPROGRAMS: + @list='$(bin_PROGRAMS)'; test -n "$$list" || exit 0; \ + echo " rm -f" $$list; \ + rm -f $$list || exit $$?; \ + test -n "$(EXEEXT)" || exit 0; \ + list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \ + echo " rm -f" $$list; \ + rm -f $$list + +test_midi_loader$(EXEEXT): $(test_midi_loader_OBJECTS) $(test_midi_loader_DEPENDENCIES) $(EXTRA_test_midi_loader_DEPENDENCIES) + @rm -f test_midi_loader$(EXEEXT) + $(AM_V_CCLD)$(LINK) $(test_midi_loader_OBJECTS) $(test_midi_loader_LDADD) $(LIBS) + +mostlyclean-compile: + -rm -f *.$(OBJEXT) + +distclean-compile: + -rm -f *.tab.c + +include ./$(DEPDIR)/fluid_list.Plo +include ./$(DEPDIR)/fluid_midi.Plo 
+include ./$(DEPDIR)/midi_loader.Plo +include ./$(DEPDIR)/test_midi_loader.Po + +.c.o: + $(AM_V_CC)$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< + $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po +# $(AM_V_CC)source='$<' object='$@' libtool=no \ +# DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) \ +# $(AM_V_CC_no)$(COMPILE) -c -o $@ $< + +.c.obj: + $(AM_V_CC)$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` + $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po +# $(AM_V_CC)source='$<' object='$@' libtool=no \ +# DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) \ +# $(AM_V_CC_no)$(COMPILE) -c -o $@ `$(CYGPATH_W) '$<'` + +.c.lo: + $(AM_V_CC)$(LTCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< + $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo +# $(AM_V_CC)source='$<' object='$@' libtool=yes \ +# DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) \ +# $(AM_V_CC_no)$(LTCOMPILE) -c -o $@ $< + +mostlyclean-libtool: + -rm -f *.lo + +clean-libtool: + -rm -rf .libs _libs + +ID: $(am__tagged_files) + $(am__define_uniq_tagged_files); mkid -fID $$unique +tags: tags-am +TAGS: tags + +tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) + set x; \ + here=`pwd`; \ + $(am__define_uniq_tagged_files); \ + shift; \ + if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ + test -n "$$unique" || unique=$$empty_fix; \ + if test $$# -gt 0; then \ + $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ + "$$@" $$unique; \ + else \ + $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ + $$unique; \ + fi; \ + fi +ctags: ctags-am + +CTAGS: ctags +ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) + $(am__define_uniq_tagged_files); \ + test -z "$(CTAGS_ARGS)$$unique" \ + || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ + $$unique + +GTAGS: + here=`$(am__cd) $(top_builddir) && pwd` \ + && $(am__cd) $(top_srcdir) \ + && gtags -i $(GTAGS_ARGS) "$$here" +cscopelist: cscopelist-am + +cscopelist-am: $(am__tagged_files) + list='$(am__tagged_files)'; \ + case "$(srcdir)" in \ + [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ + *) sdir=$(subdir)/$(srcdir) ;; \ + esac; \ + for i in $$list; do \ + if test -f "$$i"; then \ + echo "$(subdir)/$$i"; \ + else \ + echo "$$sdir/$$i"; \ + fi; \ + done >> $(top_builddir)/cscope.files + +distclean-tags: + -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags + +distdir: $(DISTFILES) + @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ + topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ + list='$(DISTFILES)'; \ + dist_files=`for file in $$list; do echo $$file; done | \ + sed -e "s|^$$srcdirstrip/||;t" \ + -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ + case $$dist_files in \ + */*) $(MKDIR_P) `echo "$$dist_files" | \ + sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ + sort -u` ;; \ + esac; \ + for file in $$dist_files; do \ + if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ + if test -d $$d/$$file; then \ + dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ + if test -d "$(distdir)/$$file"; then \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ + if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ + cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ + find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ + fi; \ + cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ + else \ + test -f "$(distdir)/$$file" \ + || cp -p $$d/$$file "$(distdir)/$$file" \ + || exit 1; \ + fi; \ + done +check-am: all-am +check: check-am +all-am: Makefile $(LTLIBRARIES) $(PROGRAMS) +installdirs: + for dir in "$(DESTDIR)$(bindir)"; do \ + test -z "$$dir" || $(MKDIR_P) "$$dir"; \ + done +install: install-am +install-exec: install-exec-am +install-data: install-data-am +uninstall: uninstall-am + +install-am: all-am + @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am + +installcheck: installcheck-am +install-strip: + if test -z '$(STRIP)'; then \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + install; \ + else \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ + fi +mostlyclean-generic: + +clean-generic: + +distclean-generic: + -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) + -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) + +maintainer-clean-generic: + @echo "This command is intended for maintainers to use" + @echo "it deletes files that may require special tools to rebuild." +clean: clean-am + +clean-am: clean-binPROGRAMS clean-generic clean-libtool \ + clean-noinstLTLIBRARIES mostlyclean-am + +distclean: distclean-am + -rm -rf ./$(DEPDIR) + -rm -f Makefile +distclean-am: clean-am distclean-compile distclean-generic \ + distclean-tags + +dvi: dvi-am + +dvi-am: + +html: html-am + +html-am: + +info: info-am + +info-am: + +install-data-am: + +install-dvi: install-dvi-am + +install-dvi-am: + +install-exec-am: install-binPROGRAMS + +install-html: install-html-am + +install-html-am: + +install-info: install-info-am + +install-info-am: + +install-man: + +install-pdf: install-pdf-am + +install-pdf-am: + +install-ps: install-ps-am + +install-ps-am: + +installcheck-am: + +maintainer-clean: maintainer-clean-am + -rm -rf ./$(DEPDIR) + -rm -f Makefile +maintainer-clean-am: distclean-am maintainer-clean-generic + +mostlyclean: mostlyclean-am + +mostlyclean-am: mostlyclean-compile mostlyclean-generic \ + mostlyclean-libtool + +pdf: pdf-am + +pdf-am: + +ps: ps-am + +ps-am: + +uninstall-am: uninstall-binPROGRAMS + +.MAKE: install-am install-strip + +.PHONY: CTAGS GTAGS TAGS all all-am check check-am clean \ + clean-binPROGRAMS clean-generic clean-libtool \ + clean-noinstLTLIBRARIES cscopelist-am ctags ctags-am distclean \ + distclean-compile distclean-generic distclean-libtool \ + distclean-tags distdir dvi dvi-am html html-am info info-am \ + install install-am install-binPROGRAMS install-data \ + install-data-am install-dvi install-dvi-am install-exec \ + install-exec-am install-html install-html-am install-info \ + install-info-am install-man install-pdf install-pdf-am \ + install-ps install-ps-am install-strip installcheck \ + installcheck-am installdirs maintainer-clean \ + maintainer-clean-generic mostlyclean mostlyclean-compile \ + mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ + tags tags-am uninstall uninstall-am uninstall-binPROGRAMS + + +# Tell versions [3.59,3.63) of GNU make to not export all variables. +# Otherwise a system limit (for SysV at least) may be exceeded. 
+.NOEXPORT: diff --git a/src/midi/Makefile.am b/src/midi/Makefile.am new file mode 100644 index 0000000..3282105 --- /dev/null +++ b/src/midi/Makefile.am @@ -0,0 +1,7 @@ +noinst_LTLIBRARIES = libfluidmidi.la +libfluidmidi_la_SOURCES = fluid_midi.c fluid_list.c midi_loader.c + +bin_PROGRAMS = test_midi_loader + +test_midi_loader_SOURCES = test_midi_loader.c +test_midi_loader_LDADD = -lm -ldl libfluidmidi.la diff --git a/src/midi/Makefile.in b/src/midi/Makefile.in new file mode 100644 index 0000000..f406eb8 --- /dev/null +++ b/src/midi/Makefile.in @@ -0,0 +1,645 @@ +# Makefile.in generated by automake 1.14.1 from Makefile.am. +# @configure_input@ + +# Copyright (C) 1994-2013 Free Software Foundation, Inc. + +# This Makefile.in is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY, to the extent permitted by law; without +# even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. + +@SET_MAKE@ + + +VPATH = @srcdir@ +am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' +am__make_running_with_option = \ + case $${target_option-} in \ + ?) ;; \ + *) echo "am__make_running_with_option: internal error: invalid" \ + "target option '$${target_option-}' specified" >&2; \ + exit 1;; \ + esac; \ + has_opt=no; \ + sane_makeflags=$$MAKEFLAGS; \ + if $(am__is_gnu_make); then \ + sane_makeflags=$$MFLAGS; \ + else \ + case $$MAKEFLAGS in \ + *\\[\ \ ]*) \ + bs=\\; \ + sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ + | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ + esac; \ + fi; \ + skip_next=no; \ + strip_trailopt () \ + { \ + flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ + }; \ + for flg in $$sane_makeflags; do \ + test $$skip_next = yes && { skip_next=no; continue; }; \ + case $$flg in \ + *=*|--*) continue;; \ + -*I) strip_trailopt 'I'; skip_next=yes;; \ + -*I?*) strip_trailopt 'I';; \ + -*O) strip_trailopt 'O'; skip_next=yes;; \ + -*O?*) strip_trailopt 'O';; \ + -*l) strip_trailopt 'l'; skip_next=yes;; \ + -*l?*) strip_trailopt 'l';; \ + -[dEDm]) skip_next=yes;; \ + -[JT]) skip_next=yes;; \ + esac; \ + case $$flg in \ + *$$target_option*) has_opt=yes; break;; \ + esac; \ + done; \ + test $$has_opt = yes +am__make_dryrun = (target_option=n; $(am__make_running_with_option)) +am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) +pkgdatadir = $(datadir)/@PACKAGE@ +pkgincludedir = $(includedir)/@PACKAGE@ +pkglibdir = $(libdir)/@PACKAGE@ +pkglibexecdir = $(libexecdir)/@PACKAGE@ +am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd +install_sh_DATA = $(install_sh) -c -m 644 +install_sh_PROGRAM = $(install_sh) -c +install_sh_SCRIPT = $(install_sh) -c +INSTALL_HEADER = $(INSTALL_DATA) +transform = $(program_transform_name) +NORMAL_INSTALL = : +PRE_INSTALL = : +POST_INSTALL = : +NORMAL_UNINSTALL = : +PRE_UNINSTALL = : +POST_UNINSTALL = : +build_triplet = @build@ +host_triplet = @host@ +bin_PROGRAMS = test_midi_loader$(EXEEXT) +subdir = src/midi +DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ + $(top_srcdir)/mkinstalldirs $(top_srcdir)/depcomp +ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 +am__aclocal_m4_deps = $(top_srcdir)/configure.ac +am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ + $(ACLOCAL_M4) +mkinstalldirs = $(SHELL) $(top_srcdir)/mkinstalldirs +CONFIG_CLEAN_FILES = +CONFIG_CLEAN_VPATH_FILES 
= +LTLIBRARIES = $(noinst_LTLIBRARIES) +libfluidmidi_la_LIBADD = +am_libfluidmidi_la_OBJECTS = fluid_midi.lo fluid_list.lo \ + midi_loader.lo +libfluidmidi_la_OBJECTS = $(am_libfluidmidi_la_OBJECTS) +AM_V_lt = $(am__v_lt_@AM_V@) +am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) +am__v_lt_0 = --silent +am__v_lt_1 = +am__installdirs = "$(DESTDIR)$(bindir)" +PROGRAMS = $(bin_PROGRAMS) +am_test_midi_loader_OBJECTS = test_midi_loader.$(OBJEXT) +test_midi_loader_OBJECTS = $(am_test_midi_loader_OBJECTS) +test_midi_loader_DEPENDENCIES = libfluidmidi.la +AM_V_P = $(am__v_P_@AM_V@) +am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) +am__v_P_0 = false +am__v_P_1 = : +AM_V_GEN = $(am__v_GEN_@AM_V@) +am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) +am__v_GEN_0 = @echo " GEN " $@; +am__v_GEN_1 = +AM_V_at = $(am__v_at_@AM_V@) +am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) +am__v_at_0 = @ +am__v_at_1 = +DEFAULT_INCLUDES = -I.@am__isrc@ +depcomp = $(SHELL) $(top_srcdir)/depcomp +am__depfiles_maybe = depfiles +am__mv = mv -f +COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ + $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) +LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ + $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \ + $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ + $(AM_CFLAGS) $(CFLAGS) +AM_V_CC = $(am__v_CC_@AM_V@) +am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@) +am__v_CC_0 = @echo " CC " $@; +am__v_CC_1 = +CCLD = $(CC) +LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ + $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ + $(AM_LDFLAGS) $(LDFLAGS) -o $@ +AM_V_CCLD = $(am__v_CCLD_@AM_V@) +am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@) +am__v_CCLD_0 = @echo " CCLD " $@; +am__v_CCLD_1 = +SOURCES = $(libfluidmidi_la_SOURCES) $(test_midi_loader_SOURCES) +DIST_SOURCES = $(libfluidmidi_la_SOURCES) $(test_midi_loader_SOURCES) +am__can_run_installinfo = \ + case $$AM_UPDATE_INFO_DIR in \ + n|no|NO) false;; \ + *) (install-info --version) >/dev/null 2>&1;; \ + esac +am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) +# Read a list of newline-separated strings from the standard input, +# and print each of them once, without duplicates. Input order is +# *not* preserved. +am__uniquify_input = $(AWK) '\ + BEGIN { nonempty = 0; } \ + { items[$$0] = 1; nonempty = 1; } \ + END { if (nonempty) { for (i in items) print i; }; } \ +' +# Make sure the list of sources is unique. This is necessary because, +# e.g., the same source file might be shared among _SOURCES variables +# for different programs/libraries. 
+am__define_uniq_tagged_files = \ + list='$(am__tagged_files)'; \ + unique=`for i in $$list; do \ + if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ + done | $(am__uniquify_input)` +ETAGS = etags +CTAGS = ctags +DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) +ACLOCAL = @ACLOCAL@ +ALSA_CFLAGS = @ALSA_CFLAGS@ +ALSA_LIBS = @ALSA_LIBS@ +AMTAR = @AMTAR@ +AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ +AR = @AR@ +AUTOCONF = @AUTOCONF@ +AUTOHEADER = @AUTOHEADER@ +AUTOMAKE = @AUTOMAKE@ +AWK = @AWK@ +CC = @CC@ +CCDEPMODE = @CCDEPMODE@ +CFLAGS = @CFLAGS@ +CPP = @CPP@ +CPPFLAGS = @CPPFLAGS@ +CYGPATH_W = @CYGPATH_W@ +DEFS = @DEFS@ +DEPDIR = @DEPDIR@ +DLLTOOL = @DLLTOOL@ +DSSI_CFLAGS = @DSSI_CFLAGS@ +DSSI_LIBS = @DSSI_LIBS@ +DSYMUTIL = @DSYMUTIL@ +DUMPBIN = @DUMPBIN@ +ECHO_C = @ECHO_C@ +ECHO_N = @ECHO_N@ +ECHO_T = @ECHO_T@ +EGREP = @EGREP@ +EXEEXT = @EXEEXT@ +FGREP = @FGREP@ +GREP = @GREP@ +INSTALL = @INSTALL@ +INSTALL_DATA = @INSTALL_DATA@ +INSTALL_PROGRAM = @INSTALL_PROGRAM@ +INSTALL_SCRIPT = @INSTALL_SCRIPT@ +INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ +LD = @LD@ +LDFLAGS = @LDFLAGS@ +LIBOBJS = @LIBOBJS@ +LIBS = @LIBS@ +LIBTOOL = @LIBTOOL@ +LIPO = @LIPO@ +LN_S = @LN_S@ +LTLIBOBJS = @LTLIBOBJS@ +MAKEINFO = @MAKEINFO@ +MANIFEST_TOOL = @MANIFEST_TOOL@ +MKDIR_P = @MKDIR_P@ +NM = @NM@ +NMEDIT = @NMEDIT@ +OBJDUMP = @OBJDUMP@ +OBJEXT = @OBJEXT@ +OTOOL = @OTOOL@ +OTOOL64 = @OTOOL64@ +PACKAGE = @PACKAGE@ +PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ +PACKAGE_NAME = @PACKAGE_NAME@ +PACKAGE_STRING = @PACKAGE_STRING@ +PACKAGE_TARNAME = @PACKAGE_TARNAME@ +PACKAGE_URL = @PACKAGE_URL@ +PACKAGE_VERSION = @PACKAGE_VERSION@ +PATH_SEPARATOR = @PATH_SEPARATOR@ +PKG_CONFIG = @PKG_CONFIG@ +PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ +PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ +RANLIB = @RANLIB@ +SED = @SED@ +SET_MAKE = @SET_MAKE@ +SHELL = @SHELL@ +SNDFILE_CFLAGS = @SNDFILE_CFLAGS@ +SNDFILE_LIBS = @SNDFILE_LIBS@ +STRIP = @STRIP@ +VERSION = @VERSION@ +abs_builddir = @abs_builddir@ +abs_srcdir = @abs_srcdir@ +abs_top_builddir = @abs_top_builddir@ +abs_top_srcdir = @abs_top_srcdir@ +ac_ct_AR = @ac_ct_AR@ +ac_ct_CC = @ac_ct_CC@ +ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ +am__include = @am__include@ +am__leading_dot = @am__leading_dot@ +am__quote = @am__quote@ +am__tar = @am__tar@ +am__untar = @am__untar@ +bindir = @bindir@ +build = @build@ +build_alias = @build_alias@ +build_cpu = @build_cpu@ +build_os = @build_os@ +build_vendor = @build_vendor@ +builddir = @builddir@ +datadir = @datadir@ +datarootdir = @datarootdir@ +docdir = @docdir@ +dvidir = @dvidir@ +exec_prefix = @exec_prefix@ +host = @host@ +host_alias = @host_alias@ +host_cpu = @host_cpu@ +host_os = @host_os@ +host_vendor = @host_vendor@ +htmldir = @htmldir@ +includedir = @includedir@ +infodir = @infodir@ +install_sh = @install_sh@ +libdir = @libdir@ +libexecdir = @libexecdir@ +localedir = @localedir@ +localstatedir = @localstatedir@ +mandir = @mandir@ +mkdir_p = @mkdir_p@ +oldincludedir = @oldincludedir@ +pdfdir = @pdfdir@ +prefix = @prefix@ +program_transform_name = @program_transform_name@ +psdir = @psdir@ +sbindir = @sbindir@ +sharedstatedir = @sharedstatedir@ +srcdir = @srcdir@ +sysconfdir = @sysconfdir@ +target_alias = @target_alias@ +top_build_prefix = @top_build_prefix@ +top_builddir = @top_builddir@ +top_srcdir = @top_srcdir@ +noinst_LTLIBRARIES = libfluidmidi.la +libfluidmidi_la_SOURCES = fluid_midi.c fluid_list.c midi_loader.c +test_midi_loader_SOURCES = test_midi_loader.c +test_midi_loader_LDADD = -lm -ldl libfluidmidi.la +all: all-am + 
+.SUFFIXES: +.SUFFIXES: .c .lo .o .obj +$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) + @for dep in $?; do \ + case '$(am__configure_deps)' in \ + *$$dep*) \ + ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ + && { if test -f $@; then exit 0; else break; fi; }; \ + exit 1;; \ + esac; \ + done; \ + echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu src/midi/Makefile'; \ + $(am__cd) $(top_srcdir) && \ + $(AUTOMAKE) --gnu src/midi/Makefile +.PRECIOUS: Makefile +Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status + @case '$?' in \ + *config.status*) \ + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ + *) \ + echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ + cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ + esac; + +$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh + +$(top_srcdir)/configure: $(am__configure_deps) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(ACLOCAL_M4): $(am__aclocal_m4_deps) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(am__aclocal_m4_deps): + +clean-noinstLTLIBRARIES: + -test -z "$(noinst_LTLIBRARIES)" || rm -f $(noinst_LTLIBRARIES) + @list='$(noinst_LTLIBRARIES)'; \ + locs=`for p in $$list; do echo $$p; done | \ + sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \ + sort -u`; \ + test -z "$$locs" || { \ + echo rm -f $${locs}; \ + rm -f $${locs}; \ + } + +libfluidmidi.la: $(libfluidmidi_la_OBJECTS) $(libfluidmidi_la_DEPENDENCIES) $(EXTRA_libfluidmidi_la_DEPENDENCIES) + $(AM_V_CCLD)$(LINK) $(libfluidmidi_la_OBJECTS) $(libfluidmidi_la_LIBADD) $(LIBS) +install-binPROGRAMS: $(bin_PROGRAMS) + @$(NORMAL_INSTALL) + @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ + if test -n "$$list"; then \ + echo " $(MKDIR_P) '$(DESTDIR)$(bindir)'"; \ + $(MKDIR_P) "$(DESTDIR)$(bindir)" || exit 1; \ + fi; \ + for p in $$list; do echo "$$p $$p"; done | \ + sed 's/$(EXEEXT)$$//' | \ + while read p p1; do if test -f $$p \ + || test -f $$p1 \ + ; then echo "$$p"; echo "$$p"; else :; fi; \ + done | \ + sed -e 'p;s,.*/,,;n;h' \ + -e 's|.*|.|' \ + -e 'p;x;s,.*/,,;s/$(EXEEXT)$$//;$(transform);s/$$/$(EXEEXT)/' | \ + sed 'N;N;N;s,\n, ,g' | \ + $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1 } \ + { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ + if ($$2 == $$4) files[d] = files[d] " " $$1; \ + else { print "f", $$3 "/" $$4, $$1; } } \ + END { for (d in files) print "f", d, files[d] }' | \ + while read type dir files; do \ + if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ + test -z "$$files" || { \ + echo " $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files '$(DESTDIR)$(bindir)$$dir'"; \ + $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files "$(DESTDIR)$(bindir)$$dir" || exit $$?; \ + } \ + ; done + +uninstall-binPROGRAMS: + @$(NORMAL_UNINSTALL) + @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ + files=`for p in $$list; do echo "$$p"; done | \ + sed -e 'h;s,^.*/,,;s/$(EXEEXT)$$//;$(transform)' \ + -e 's/$$/$(EXEEXT)/' \ + `; \ + test -n "$$list" || exit 0; \ + echo " ( cd '$(DESTDIR)$(bindir)' && rm -f" $$files ")"; \ + cd "$(DESTDIR)$(bindir)" && rm -f $$files + +clean-binPROGRAMS: + @list='$(bin_PROGRAMS)'; test -n "$$list" || exit 0; \ + echo " rm -f" $$list; \ + rm -f $$list || exit 
$$?; \ + test -n "$(EXEEXT)" || exit 0; \ + list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \ + echo " rm -f" $$list; \ + rm -f $$list + +test_midi_loader$(EXEEXT): $(test_midi_loader_OBJECTS) $(test_midi_loader_DEPENDENCIES) $(EXTRA_test_midi_loader_DEPENDENCIES) + @rm -f test_midi_loader$(EXEEXT) + $(AM_V_CCLD)$(LINK) $(test_midi_loader_OBJECTS) $(test_midi_loader_LDADD) $(LIBS) + +mostlyclean-compile: + -rm -f *.$(OBJEXT) + +distclean-compile: + -rm -f *.tab.c + +@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/fluid_list.Plo@am__quote@ +@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/fluid_midi.Plo@am__quote@ +@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/midi_loader.Plo@am__quote@ +@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/test_midi_loader.Po@am__quote@ + +.c.o: +@am__fastdepCC_TRUE@ $(AM_V_CC)$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< +@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po +@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ +@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(COMPILE) -c -o $@ $< + +.c.obj: +@am__fastdepCC_TRUE@ $(AM_V_CC)$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` +@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po +@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ +@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(COMPILE) -c -o $@ `$(CYGPATH_W) '$<'` + +.c.lo: +@am__fastdepCC_TRUE@ $(AM_V_CC)$(LTCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< +@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo +@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ +@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LTCOMPILE) -c -o $@ $< + +mostlyclean-libtool: + -rm -f *.lo + +clean-libtool: + -rm -rf .libs _libs + +ID: $(am__tagged_files) + $(am__define_uniq_tagged_files); mkid -fID $$unique +tags: tags-am +TAGS: tags + +tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) + set x; \ + here=`pwd`; \ + $(am__define_uniq_tagged_files); \ + shift; \ + if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ + test -n "$$unique" || unique=$$empty_fix; \ + if test $$# -gt 0; then \ + $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ + "$$@" $$unique; \ + else \ + $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ + $$unique; \ + fi; \ + fi +ctags: ctags-am + +CTAGS: ctags +ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) + $(am__define_uniq_tagged_files); \ + test -z "$(CTAGS_ARGS)$$unique" \ + || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ + $$unique + +GTAGS: + here=`$(am__cd) $(top_builddir) && pwd` \ + && $(am__cd) $(top_srcdir) \ + && gtags -i $(GTAGS_ARGS) "$$here" +cscopelist: cscopelist-am + +cscopelist-am: $(am__tagged_files) + list='$(am__tagged_files)'; \ + case "$(srcdir)" in \ + [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ + *) sdir=$(subdir)/$(srcdir) ;; \ + esac; \ + for i in $$list; do \ + if test -f "$$i"; then \ + echo "$(subdir)/$$i"; \ + else \ + echo "$$sdir/$$i"; \ + fi; \ + done >> $(top_builddir)/cscope.files + +distclean-tags: + -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags + +distdir: 
$(DISTFILES) + @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ + topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ + list='$(DISTFILES)'; \ + dist_files=`for file in $$list; do echo $$file; done | \ + sed -e "s|^$$srcdirstrip/||;t" \ + -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ + case $$dist_files in \ + */*) $(MKDIR_P) `echo "$$dist_files" | \ + sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ + sort -u` ;; \ + esac; \ + for file in $$dist_files; do \ + if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ + if test -d $$d/$$file; then \ + dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ + if test -d "$(distdir)/$$file"; then \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ + if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ + cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ + cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ + else \ + test -f "$(distdir)/$$file" \ + || cp -p $$d/$$file "$(distdir)/$$file" \ + || exit 1; \ + fi; \ + done +check-am: all-am +check: check-am +all-am: Makefile $(LTLIBRARIES) $(PROGRAMS) +installdirs: + for dir in "$(DESTDIR)$(bindir)"; do \ + test -z "$$dir" || $(MKDIR_P) "$$dir"; \ + done +install: install-am +install-exec: install-exec-am +install-data: install-data-am +uninstall: uninstall-am + +install-am: all-am + @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am + +installcheck: installcheck-am +install-strip: + if test -z '$(STRIP)'; then \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + install; \ + else \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ + fi +mostlyclean-generic: + +clean-generic: + +distclean-generic: + -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) + -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) + +maintainer-clean-generic: + @echo "This command is intended for maintainers to use" + @echo "it deletes files that may require special tools to rebuild." 
+clean: clean-am + +clean-am: clean-binPROGRAMS clean-generic clean-libtool \ + clean-noinstLTLIBRARIES mostlyclean-am + +distclean: distclean-am + -rm -rf ./$(DEPDIR) + -rm -f Makefile +distclean-am: clean-am distclean-compile distclean-generic \ + distclean-tags + +dvi: dvi-am + +dvi-am: + +html: html-am + +html-am: + +info: info-am + +info-am: + +install-data-am: + +install-dvi: install-dvi-am + +install-dvi-am: + +install-exec-am: install-binPROGRAMS + +install-html: install-html-am + +install-html-am: + +install-info: install-info-am + +install-info-am: + +install-man: + +install-pdf: install-pdf-am + +install-pdf-am: + +install-ps: install-ps-am + +install-ps-am: + +installcheck-am: + +maintainer-clean: maintainer-clean-am + -rm -rf ./$(DEPDIR) + -rm -f Makefile +maintainer-clean-am: distclean-am maintainer-clean-generic + +mostlyclean: mostlyclean-am + +mostlyclean-am: mostlyclean-compile mostlyclean-generic \ + mostlyclean-libtool + +pdf: pdf-am + +pdf-am: + +ps: ps-am + +ps-am: + +uninstall-am: uninstall-binPROGRAMS + +.MAKE: install-am install-strip + +.PHONY: CTAGS GTAGS TAGS all all-am check check-am clean \ + clean-binPROGRAMS clean-generic clean-libtool \ + clean-noinstLTLIBRARIES cscopelist-am ctags ctags-am distclean \ + distclean-compile distclean-generic distclean-libtool \ + distclean-tags distdir dvi dvi-am html html-am info info-am \ + install install-am install-binPROGRAMS install-data \ + install-data-am install-dvi install-dvi-am install-exec \ + install-exec-am install-html install-html-am install-info \ + install-info-am install-man install-pdf install-pdf-am \ + install-ps install-ps-am install-strip installcheck \ + installcheck-am installdirs maintainer-clean \ + maintainer-clean-generic mostlyclean mostlyclean-compile \ + mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ + tags tags-am uninstall uninstall-am uninstall-binPROGRAMS + + +# Tell versions [3.59,3.63) of GNU make to not export all variables. +# Otherwise a system limit (for SysV at least) may be exceeded. +.NOEXPORT: diff --git a/src/midi/NOTES b/src/midi/NOTES new file mode 100644 index 0000000..4bd7b67 --- /dev/null +++ b/src/midi/NOTES @@ -0,0 +1,84 @@ + + //unsigned long nevents; weird...so the plugin must know about previous notes that are pressed? yes for each call run_synth it should know about note which are currently pressed. oh but there isn't any place to indicate that? seems like we just need + //to keep track of how many notes are currently running and pass it an unsigned long that has that count, but aside from that, + //it just takes note_on and note_off structs, which refer to only one note at a time? not really, + //so in C to pass an array we have to use two variables onw is a pointer to data, another is number of elements in that array. so run_synth is one of such functions, and if you have two notes pressed you send array of size 2, storing two elements nearby in memory and setting nevents to 2. got it, so current_event isn't a pointer to a struct, it's a pointer to an array of event structs, most likely? yes +// +//so what do we do next? we need to construct this array of currently pressed notes, so need to check events and try to construc this. + + + + +//ok moving on...I did a bit of research about midi types...basically the reason mrswatson only supports type0 midi is that +//the idea behind type0 midi is that it is single-channel. 
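A minimal sketch of the pointer-plus-count convention described in the run_synth discussion above, assuming DSSI's run_synth() entry point as declared in dssi.h (the include path and the helper name send_pressed_notes are assumptions for illustration, not part of this repo):

#include <dssi.h>   /* DSSI_Descriptor, LADSPA_Handle, snd_seq_event_t */

/* Hand the plugin every currently held note in one call: 'pressed' is a dense
   array with no gaps, and 'nevents' tells run_synth how many elements it holds. */
static void send_pressed_notes(const DSSI_Descriptor *desc, LADSPA_Handle handle,
                               unsigned long sample_count,
                               snd_seq_event_t *pressed, unsigned long nevents)
{
    desc->run_synth(handle, sample_count, pressed, nevents);
}

With two notes held, pressed would contain two snd_seq_event_t values stored side by side and nevents would be 2, exactly as described in the notes.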
Here channel is a bit of a confusing word, because there is also such a thing as an audio channel, obviously, +//but the two are completely unrelated. Channel is like a track in multitrack recording; type0 midi only has one of these tracks, but potentially has program +//changes within it (a program change is just a change of voice). So what we're working on here only handles single-channel midi as well, and that's good. +//The only thing to think about is whether we abstract our loop to process the note events in the channel, rather than the note events in the entire file (unfortunately confusing +//terminology again, because in midi files the entire file is called a "track"). But this is basically the situation. +// +//Midi events come in a few different types, but basically there are three types that are important: +// +//meta type events +// +//channel events (events particular to a single channel) +// +//track events (events that apply to every channel) +// +//so meta type events are sort of special events, +// +//channel events and track events are more like "regular" events.... +// +//do you follow so far? yep +// +// +// +//So the main question to ask you is, since we are iterating through all events, how do we handle multiple channels? We have the option to do what mrswatson has done, +//which is to read the midi file type from the header and end in error if it is anything but type0, or we can process the channels individually, in the sense that we send appropriate info to the plugin, telling it which channel each event is intended for. Most plugins, though, don't care about multiple channels. +// +//Another option, more closely related to the first, is to write a simple script (or find one) that separates multi-channel midi into individual type0 files. +// +//So with multi-channel files, if events are sorted by time in the file, then we can just send them all at once as if they were one channel, and the plugin will produce the resulting file. +//if that's the plan, I guess we should check to make sure that dssi doesn't support multiple channels, because if it does, maybe we should tell the plugin which channel each event is intended for? I think snd_seq_event_t has a field for channel, so we just need to copy that info there. +// +// +// +//ok do we break now? seems a bit late, yeah. so basically what is left to do is to turn the pseudo code of this function into real code, add printing to see how it works, and fix any bugs we find. +//ok then make this into a module, add it to cli-dssi-host and change the cli-dssi-host source to deal with this loop? yep. got it. maybe we can look into this tomorrow morning? sure ok perfect + + + + //ok so what were you thinking here? + //ok and finally, run_synth needs an array which is not sparse, where the elements sit next to each other with no gaps, so either you rebuild a + //new array every time you call run_synth, or keep the elements together and use a find function to locate a note. find function + //sounds best, either way, do we need two arrays? no need + + //with this, can I assign to it sparsely? if I want to put notes[100] = 4; , will that cause an error? + //so the first thing is that you need to put the size of the memory you use in malloc, not the number of elements, like that, right? yep + //the second is that we need snd_seq_event_t instead of ints. I was just thinking that if the note is pressed, it would + //have a value, and if not it wouldn't, so it would be an easy way to keep track of which are pressed or not? hmm, sort of a short lookup array, might work if built properly.
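A rough sketch of the 128-slot lookup table being discussed here, assuming ALSA's snd_seq_event_t (note number in data.note.note); note_table, find_note, insert_note, delete_note and collect_pressed are hypothetical names, and the pseudocode that follows in these notes maps directly onto them:

#include <alsa/asoundlib.h>   /* snd_seq_event_t */

#define MAX_NOTES 128         /* MIDI note numbers run 0-127 */

/* One slot per possible note; 'active' marks whether that note is currently held. */
typedef struct {
    int active;
    snd_seq_event_t ev;
} note_slot_t;

static note_slot_t note_table[MAX_NOTES];

/* Return the stored event for a held note, or NULL if it is not pressed. */
static snd_seq_event_t *find_note(int note)
{
    if (note < 0 || note >= MAX_NOTES || !note_table[note].active)
        return NULL;
    return &note_table[note].ev;
}

static void insert_note(const snd_seq_event_t *ev)
{
    unsigned char note = ev->data.note.note;
    if (note < MAX_NOTES) {
        note_table[note].ev = *ev;
        note_table[note].active = 1;
    }
}

static void delete_note(int note)
{
    if (note >= 0 && note < MAX_NOTES)
        note_table[note].active = 0;
}

/* Rebuild the dense array that run_synth expects; returns the element count. */
static unsigned long collect_pressed(snd_seq_event_t *out)
{
    unsigned long n = 0;
    int i;
    for (i = 0; i < MAX_NOTES; i++)
        if (note_table[i].active)
            out[n++] = note_table[i].ev;
    return n;
}

A static table like this sidesteps the heap question raised above, at the cost of a fixed 128-note ceiling, which matches the 0-127 range of MIDI note numbers.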
ok well I don't know if that's right I'll make another array +//question is, if we're sending a list, and a NOTE_ON and NOTE_OFF must be removed from the list once it has been already processed, how do we access those elements? by note name somehow? yeah need to track each note on\off pair. +//no clue how to do this in C, easy in any other language, just use an associative array +//right in C it turns into function which finds an element in array. how big should our array be? do we use the heap here? ideally we need to use it, but guess we can limit number of notes which are pressed at same time, like 100 or so should be plently there are only so many midi notes, I'll get the count 128 that should be the limit good, try write with heap functions, might be useful to see how to work with it. well could I just + + + + + //so what does this return exactly? so then event arrives we need to try find note if it was pressed or not. + //so basically whole logic of get_events function is: + // + //snd_seq_event_t event = convert_event(fluid_midi_event_t); + //event_in_table = find_event(&event); + //switch(event->type ){ + // case NOTE_ON: + // if(event_in_table) return; // skip if pressed while still was pressed, shouldn't happen...I think it just gets pressed again, but both are deleted on the note_off, does that make sense? hmm might be, but that change a logic a bit + // yeah something like this. so basic idea is clear or not really? yeah it's clear, just too advanced for me + // to come up with on my own right now given my current skills with C, it's coming along fast though well it's simple then written in perl or somethng else, just showing how c does the same things. so now need to implement those functions ok + // insert_event(event); + // run_synth(); + // + // case NOTE_OFF: + // replace_events(event_in_table,event) + // run_synth() + // delete_events(event); + // + //} diff --git a/src/midi/example.mid b/src/midi/example.mid Binary files differnew file mode 100644 index 0000000..328d6ef --- /dev/null +++ b/src/midi/example.mid diff --git a/src/midi/example1.mid b/src/midi/example1.mid Binary files differnew file mode 100644 index 0000000..2fa1b91 --- /dev/null +++ b/src/midi/example1.mid diff --git a/src/midi/example2.mid b/src/midi/example2.mid Binary files differnew file mode 100644 index 0000000..bf3bfc0 --- /dev/null +++ b/src/midi/example2.mid diff --git a/src/midi/fluid_list.c b/src/midi/fluid_list.c new file mode 100644 index 0000000..e06b6f9 --- /dev/null +++ b/src/midi/fluid_list.c @@ -0,0 +1,244 @@ + + + +#include "fluid_list.h" +#include <stdlib.h> + + +fluid_list_t* +new_fluid_list(void) +{ + fluid_list_t* list; + list = (fluid_list_t*) FLUID_MALLOC(sizeof(fluid_list_t)); + list->data = NULL; + list->next = NULL; + return list; +} + +void +delete_fluid_list(fluid_list_t *list) +{ + fluid_list_t *next; + while (list) { + next = list->next; + FLUID_FREE(list); + list = next; + } +} + +void +delete1_fluid_list(fluid_list_t *list) +{ + if (list) { + FLUID_FREE(list); + } +} + +fluid_list_t* +fluid_list_append(fluid_list_t *list, void* data) +{ + fluid_list_t *new_list; + fluid_list_t *last; + + new_list = new_fluid_list(); + new_list->data = data; + + if (list) + { + last = fluid_list_last(list); + /* g_assert (last != NULL); */ + last->next = new_list; + + return list; + } + else + return new_list; +} + +fluid_list_t* +fluid_list_prepend(fluid_list_t *list, void* data) +{ + fluid_list_t *new_list; + + new_list = new_fluid_list(); + new_list->data = data; + new_list->next = list; 
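+  /* the new node now links to the old head, so it becomes the new head of the list */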
+ + return new_list; +} + +fluid_list_t* +fluid_list_nth(fluid_list_t *list, int n) +{ + while ((n-- > 0) && list) { + list = list->next; + } + + return list; +} + +fluid_list_t* +fluid_list_remove(fluid_list_t *list, void* data) +{ + fluid_list_t *tmp; + fluid_list_t *prev; + + prev = NULL; + tmp = list; + + while (tmp) { + if (tmp->data == data) { + if (prev) { + prev->next = tmp->next; + } + if (list == tmp) { + list = list->next; + } + tmp->next = NULL; + delete_fluid_list(tmp); + + break; + } + + prev = tmp; + tmp = tmp->next; + } + + return list; +} + +fluid_list_t* +fluid_list_remove_link(fluid_list_t *list, fluid_list_t *link) +{ + fluid_list_t *tmp; + fluid_list_t *prev; + + prev = NULL; + tmp = list; + + while (tmp) { + if (tmp == link) { + if (prev) { + prev->next = tmp->next; + } + if (list == tmp) { + list = list->next; + } + tmp->next = NULL; + break; + } + + prev = tmp; + tmp = tmp->next; + } + + return list; +} + +static fluid_list_t* +fluid_list_sort_merge(fluid_list_t *l1, fluid_list_t *l2, fluid_compare_func_t compare_func) +{ + fluid_list_t list, *l; + + l = &list; + + while (l1 && l2) { + if (compare_func(l1->data,l2->data) < 0) { + l = l->next = l1; + l1 = l1->next; + } else { + l = l->next = l2; + l2 = l2->next; + } + } + l->next= l1 ? l1 : l2; + + return list.next; +} + +fluid_list_t* +fluid_list_sort(fluid_list_t *list, fluid_compare_func_t compare_func) +{ + fluid_list_t *l1, *l2; + + if (!list) { + return NULL; + } + if (!list->next) { + return list; + } + + l1 = list; + l2 = list->next; + + while ((l2 = l2->next) != NULL) { + if ((l2 = l2->next) == NULL) + break; + l1=l1->next; + } + l2 = l1->next; + l1->next = NULL; + + return fluid_list_sort_merge(fluid_list_sort(list, compare_func), + fluid_list_sort(l2, compare_func), + compare_func); +} + + +fluid_list_t* +fluid_list_last(fluid_list_t *list) +{ + if (list) { + while (list->next) + list = list->next; + } + + return list; +} + +int +fluid_list_size(fluid_list_t *list) +{ + int n = 0; + while (list) { + n++; + list = list->next; + } + return n; +} + +fluid_list_t* fluid_list_insert_at(fluid_list_t *list, int n, void* data) +{ + fluid_list_t *new_list; + fluid_list_t *cur; + fluid_list_t *prev = NULL; + + new_list = new_fluid_list(); + new_list->data = data; + + cur = list; + while ((n-- > 0) && cur) { + prev = cur; + cur = cur->next; + } + + new_list->next = cur; + + if (prev) { + prev->next = new_list; + return list; + } else { + return new_list; + } +} + +/* Compare function to sort strings alphabetically, + * for use with fluid_list_sort(). */ +int +fluid_list_str_compare_func (void *a, void *b) +{ + if (a && b) return FLUID_STRCMP ((char *)a, (char *)b); + if (!a && !b) return 0; + if (a) return -1; + return 1; +} diff --git a/src/midi/fluid_list.h b/src/midi/fluid_list.h new file mode 100644 index 0000000..bdc3291 --- /dev/null +++ b/src/midi/fluid_list.h @@ -0,0 +1,62 @@ +/* GLIB - Library of useful routines for C programming + * Copyright (C) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02110-1301, USA. + */ + +#ifndef _FLUID_LIST_H +#define _FLUID_LIST_H + +#include "fluidsynth_priv.h" + +/* + * + * Lists + * + * A sound font loader has to pack the data from the .SF2 file into + * list structures of this type. + * + */ + +typedef struct _fluid_list_t fluid_list_t; + +typedef int (*fluid_compare_func_t)(void* a, void* b); + +struct _fluid_list_t +{ + void* data; + fluid_list_t *next; +}; + +fluid_list_t* new_fluid_list(void); +void delete_fluid_list(fluid_list_t *list); +void delete1_fluid_list(fluid_list_t *list); +fluid_list_t* fluid_list_sort(fluid_list_t *list, fluid_compare_func_t compare_func); +fluid_list_t* fluid_list_append(fluid_list_t *list, void* data); +fluid_list_t* fluid_list_prepend(fluid_list_t *list, void* data); +fluid_list_t* fluid_list_remove(fluid_list_t *list, void* data); +fluid_list_t* fluid_list_remove_link(fluid_list_t *list, fluid_list_t *llink); +fluid_list_t* fluid_list_nth(fluid_list_t *list, int n); +fluid_list_t* fluid_list_last(fluid_list_t *list); +fluid_list_t* fluid_list_insert_at(fluid_list_t *list, int n, void* data); +int fluid_list_size(fluid_list_t *list); + +#define fluid_list_next(slist) ((slist) ? (((fluid_list_t *)(slist))->next) : NULL) +#define fluid_list_get(slist) ((slist) ? ((slist)->data) : NULL) + +int fluid_list_str_compare_func (void *a, void *b); + +#endif /* _FLUID_LIST_H */ diff --git a/src/midi/fluid_list.lo b/src/midi/fluid_list.lo new file mode 100644 index 0000000..bbc909d --- /dev/null +++ b/src/midi/fluid_list.lo @@ -0,0 +1,12 @@ +# fluid_list.lo - a libtool object file +# Generated by libtool (GNU libtool) 2.4.2 +# +# Please DO NOT delete this file! +# It is necessary for linking the library. + +# Name of the PIC object. +pic_object='.libs/fluid_list.o' + +# Name of the non-PIC object +non_pic_object='fluid_list.o' + diff --git a/src/midi/fluid_list.o b/src/midi/fluid_list.o Binary files differnew file mode 100644 index 0000000..7f329e4 --- /dev/null +++ b/src/midi/fluid_list.o diff --git a/src/midi/fluid_midi.c b/src/midi/fluid_midi.c new file mode 100644 index 0000000..062ba3c --- /dev/null +++ b/src/midi/fluid_midi.c @@ -0,0 +1,1944 @@ + +#include "fluid_midi.h" +#include <math.h> +//#include "fluid_sys.h" +//#include "fluid_synth.h" +//#include "fluid_settings.h" + +int +fluid_log(int level, const char* fmt, ...) +{ + +} + +static int fluid_midi_event_length(unsigned char event); + +/* Read the entire contents of a file into memory, allocating enough memory + * for the file, and returning the length and the buffer. + * Note: This rewinds the file to the start before reading. + * Returns NULL if there was an error reading or allocating memory. + */ +static char* fluid_file_read_full(fluid_file fp, size_t* length); +#define READ_FULL_INITIAL_BUFLEN 1024 + + +/*************************************************************** + * FILE READ HERE + * + * MIDIFILE + */ + +/** + * Return a new MIDI file handle for parsing an already-loaded MIDI file. + * @internal + * @param buffer Pointer to full contents of MIDI file (borrows the pointer). + * The caller must not free buffer until after the fluid_midi_file is deleted. + * @param length Size of the buffer in bytes. + * @return New MIDI file handle or NULL on error. 
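+ * Typical usage, per the notes just after this function: open the file with fopen(),
+ * read it into memory with fluid_file_read_full() to get the buffer and its length,
+ * then pass both here; keep the buffer allocated until delete_fluid_midi_file().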
+ */ +fluid_midi_file * +new_fluid_midi_file(const char* buffer, size_t length) +{ + fluid_midi_file *mf; + + mf = FLUID_NEW(fluid_midi_file); + if (mf == NULL) { + FLUID_LOG(FLUID_ERR, "Out of memory"); + return NULL; + } + FLUID_MEMSET(mf, 0, sizeof(fluid_midi_file)); + + mf->c = -1; + mf->running_status = -1; + + mf->buffer = buffer; + mf->buf_len = length; + mf->buf_pos = 0; + mf->eof = FALSE; + + if (fluid_midi_file_read_mthd(mf) != FLUID_OK) { // here it calls read file + FLUID_FREE(mf); + return NULL; + } + return mf; +} +//ok how do you usually create a file_pointer in this case? with fopen FILE READ do you think we can just try this function directly with fopen? yes, fluid_file i think is just FILE * returned by fopen, so code will be fopen, fluid_file_read_full and then new_fluid_midi_file. + +static char* +fluid_file_read_full(fluid_file fp, size_t* length) +{ + size_t buflen; + char* buffer; + size_t n; + /* Work out the length of the file in advance */ + if (FLUID_FSEEK(fp, 0, SEEK_END) != 0) + { + FLUID_LOG(FLUID_ERR, "File load: Could not seek within file"); + return NULL; + } + buflen = ftell(fp); + if (FLUID_FSEEK(fp, 0, SEEK_SET) != 0) + { + FLUID_LOG(FLUID_ERR, "File load: Could not seek within file"); + return NULL; + } + FLUID_LOG(FLUID_DBG, "File load: Allocating %d bytes", buflen); + buffer = FLUID_MALLOC(buflen); + if (buffer == NULL) { + FLUID_LOG(FLUID_PANIC, "Out of memory"); + return NULL; + } + n = FLUID_FREAD(buffer, 1, buflen, fp); + if (n != buflen) { + FLUID_LOG(FLUID_ERR, "Only read %d bytes; expected %d", n, + buflen); + FLUID_FREE(buffer); + return NULL; + }; + *length = n; + return buffer; +} + +/** + * Delete a MIDI file handle. + * @internal + * @param mf MIDI file handle to close and free. + */ +void +delete_fluid_midi_file (fluid_midi_file *mf) +{ + if (mf == NULL) { + return; + } + FLUID_FREE(mf); + return; +} + +/* + * Gets the next byte in a MIDI file, taking into account previous running status. + * + * returns FLUID_FAILED if EOF or read error + */ +int +fluid_midi_file_getc (fluid_midi_file *mf) +{ + unsigned char c; + if (mf->c >= 0) { + c = mf->c; + mf->c = -1; + } else { + if (mf->buf_pos >= mf->buf_len) { + mf->eof = TRUE; + return FLUID_FAILED; + } + c = mf->buffer[mf->buf_pos++]; + mf->trackpos++; + } + return (int) c; +} + +/* + * Saves a byte to be returned the next time fluid_midi_file_getc() is called, + * when it is necessary according to running status. + */ +int +fluid_midi_file_push(fluid_midi_file *mf, int c) +{ + mf->c = c; + return FLUID_OK; +} + +/* + * fluid_midi_file_read + */ +int +fluid_midi_file_read(fluid_midi_file *mf, void *buf, int len) +{ + int num = len < mf->buf_len - mf->buf_pos + ? len : mf->buf_len - mf->buf_pos; + if (num != len) { + mf->eof = TRUE; + } + if (num < 0) { + num = 0; + } + /* Note: Read bytes, even if there aren't enough, but only increment + * trackpos if successful (emulates old behaviour of fluid_midi_file_read) + */ + FLUID_MEMCPY(buf, mf->buffer+mf->buf_pos, num); + mf->buf_pos += num; + if (num == len) + mf->trackpos += num; +#if DEBUG + else + FLUID_LOG(FLUID_DBG, "Could not read the requested number of bytes"); +#endif + return (num != len) ? FLUID_FAILED : FLUID_OK; +} + +/* + * fluid_midi_file_skip + */ +int +fluid_midi_file_skip (fluid_midi_file *mf, int skip) +{ + int new_pos = mf->buf_pos + skip; + /* Mimic the behaviour of fseek: Error to seek past the start of file, but + * OK to seek past end (this just puts it into the EOF state). 
*/ + if (new_pos < 0) { + FLUID_LOG(FLUID_ERR, "Failed to seek position in file"); + return FLUID_FAILED; + } + /* Clear the EOF flag, even if moved past the end of the file (this is + * consistent with the behaviour of fseek). */ + mf->eof = FALSE; + mf->buf_pos = new_pos; + return FLUID_OK; +} + +/* + * fluid_midi_file_eof + */ +int fluid_midi_file_eof(fluid_midi_file* mf) +{ + /* Note: This does not simply test whether the file read pointer is past + * the end of the file. It mimics the behaviour of feof by actually + * testing the stateful EOF condition, which is set to TRUE if getc or + * fread have attempted to read past the end (but not if they have + * precisely reached the end), but reset to FALSE upon a successful seek. + */ + return mf->eof; +} + +/* + * fluid_midi_file_read_mthd + */ +//it's actually does all the work inside new_fluid_midi_file function to read file and get all events allocated and set, so now we just need to print those. +int +fluid_midi_file_read_mthd(fluid_midi_file *mf) +{ + char mthd[15]; + if (fluid_midi_file_read(mf, mthd, 14) != FLUID_OK) { + return FLUID_FAILED; + } + if ((FLUID_STRNCMP(mthd, "MThd", 4) != 0) || (mthd[7] != 6) + || (mthd[9] > 2)) { + FLUID_LOG(FLUID_ERR, + "Doesn't look like a MIDI file: invalid MThd header"); + return FLUID_FAILED; + } + mf->type = mthd[9]; + mf->ntracks = (unsigned) mthd[11]; + mf->ntracks += (unsigned int) (mthd[10]) << 16; + if ((mthd[12]) < 0) { + mf->uses_smpte = 1; + mf->smpte_fps = -mthd[12]; + mf->smpte_res = (unsigned) mthd[13]; + FLUID_LOG(FLUID_ERR, "File uses SMPTE timing -- Not implemented yet"); + return FLUID_FAILED; + } else { + mf->uses_smpte = 0; + mf->division = (mthd[12] << 8) | (mthd[13] & 0xff); //division is in the header for the midi file, he gets the value here. do we have access to it in custom.c? looks like player have also current time in milliseconds too, in addition to ticks, we can try to just use those. ok good. so the player is doing the parsing, we're just + + FLUID_LOG(FLUID_DBG, "Division=%d", mf->division); + } + return FLUID_OK; +} + +/* + * fluid_midi_file_load_tracks + * so now we load tracks, right? 
yeah but looks like we already have such code in here: + */ +int +fluid_midi_file_load_tracks(fluid_midi_file *mf, fluid_player_t *player) +{ + int i; + for (i = 0; i < mf->ntracks; i++) { + if (fluid_midi_file_read_track(mf, player, i) != FLUID_OK) { + return FLUID_FAILED; + } + } + return FLUID_OK; +} + +/* + * fluid_isasciistring + */ +int +fluid_isasciistring(char *s) +{ + int i; + int len = (int) FLUID_STRLEN(s); + for (i = 0; i < len; i++) { + if (!fluid_isascii(s[i])) { + return 0; + } + } + return 1; +} + +/* + * fluid_getlength + */ +long +fluid_getlength(unsigned char *s) +{ + long i = 0; + i = s[3] | (s[2] << 8) | (s[1] << 16) | (s[0] << 24); + return i; +} + +/* + * fluid_midi_file_read_tracklen + */ +int +fluid_midi_file_read_tracklen(fluid_midi_file *mf) +{ + unsigned char length[5]; + if (fluid_midi_file_read(mf, length, 4) != FLUID_OK) { + return FLUID_FAILED; + } + mf->tracklen = fluid_getlength(length); + mf->trackpos = 0; + mf->eot = 0; + return FLUID_OK; +} + +/* + * fluid_midi_file_eot + */ +int +fluid_midi_file_eot(fluid_midi_file *mf) +{ +#if DEBUG + if (mf->trackpos > mf->tracklen) { + printf("track overrun: %d > %d\n", mf->trackpos, mf->tracklen); + } +#endif + return mf->eot || (mf->trackpos >= mf->tracklen); +} + +/* + * fluid_midi_file_read_track + */ +int +fluid_midi_file_read_track(fluid_midi_file *mf, fluid_player_t *player, int num) +{ + fluid_track_t *track; + unsigned char id[5], length[5]; + int found_track = 0; + int skip; + + if (fluid_midi_file_read(mf, id, 4) != FLUID_OK) { + return FLUID_FAILED; + } + id[4] = '\0'; + mf->dtime = 0; + + while (!found_track) { + + if (fluid_isasciistring((char *) id) == 0) { + FLUID_LOG(FLUID_ERR, + "An non-ascii track header found, corrupt file"); + return FLUID_FAILED; + + } else if (strcmp((char *) id, "MTrk") == 0) { + + found_track = 1; + + if (fluid_midi_file_read_tracklen(mf) != FLUID_OK) { + return FLUID_FAILED; + } + + track = new_fluid_track(num); + if (track == NULL) { + FLUID_LOG(FLUID_ERR, "Out of memory"); + return FLUID_FAILED; + } + + while (!fluid_midi_file_eot(mf)) { + if (fluid_midi_file_read_event(mf, track) != FLUID_OK) { + delete_fluid_track(track); + return FLUID_FAILED; + } + } + + /* Skip remaining track data, if any */ + if (mf->trackpos < mf->tracklen) + fluid_midi_file_skip(mf, mf->tracklen - mf->trackpos); + + fluid_player_add_track(player, track); + + } else { + found_track = 0; + if (fluid_midi_file_read(mf, length, 4) != FLUID_OK) { + return FLUID_FAILED; + } + skip = fluid_getlength(length); + /* fseek(mf->fp, skip, SEEK_CUR); */ + if (fluid_midi_file_skip(mf, skip) != FLUID_OK) { + return FLUID_FAILED; + } + } + } + if (fluid_midi_file_eof(mf)) { + FLUID_LOG(FLUID_ERR, "Unexpected end of file"); + return FLUID_FAILED; + } + return FLUID_OK; +} + +/* + * fluid_midi_file_read_varlen + */ +int +fluid_midi_file_read_varlen(fluid_midi_file *mf) +{ + int i; + int c; + mf->varlen = 0; + for (i = 0;; i++) { + if (i == 4) { + FLUID_LOG(FLUID_ERR, "Invalid variable length number"); + return FLUID_FAILED; + } + c = fluid_midi_file_getc(mf); + if (c < 0) { + FLUID_LOG(FLUID_ERR, "Unexpected end of file"); + return FLUID_FAILED; + } + if (c & 0x80) { + mf->varlen |= (int) (c & 0x7F); + mf->varlen <<= 7; + } else { + mf->varlen += c; + break; + } + } + return FLUID_OK; +} + +/* + * fluid_midi_file_read_event + */ +//could be this, right...it only takes the file and the track as args +int +fluid_midi_file_read_event(fluid_midi_file *mf, fluid_track_t *track) +{ + int status; + int type; + int tempo; 
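+    /* metadata points at the payload of SYSEX and meta events: SYSEX payloads are
+       malloc'd and handed off to the event, while meta payloads use static_buf or,
+       at 255 bytes and above, the malloc'd dyn_buf */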
+ unsigned char *metadata = NULL; + unsigned char *dyn_buf = NULL; + unsigned char static_buf[256]; + int nominator, denominator, clocks, notes; + fluid_midi_event_t *evt; //do you know what evt is? is that the event struct? + int channel = 0; + int param1 = 0; + int param2 = 0; + int size; + + /* read the delta-time of the event */ + if (fluid_midi_file_read_varlen(mf) != FLUID_OK) { + return FLUID_FAILED; + } + mf->dtime += mf->varlen; + + /* read the status byte */ + status = fluid_midi_file_getc(mf); + if (status < 0) { + FLUID_LOG(FLUID_ERR, "Unexpected end of file"); + return FLUID_FAILED; + } + + /* not a valid status byte: use the running status instead */ + if ((status & 0x80) == 0) { + if ((mf->running_status & 0x80) == 0) { + FLUID_LOG(FLUID_ERR, "Undefined status and invalid running status"); + return FLUID_FAILED; + } + fluid_midi_file_push(mf, status); + status = mf->running_status; + } + + /* check what message we have */ + + mf->running_status = status; + + if ((status == MIDI_SYSEX)) { /* system exclusif */ + /* read the length of the message */ + if (fluid_midi_file_read_varlen(mf) != FLUID_OK) { + return FLUID_FAILED; + } + + if (mf->varlen) { + FLUID_LOG(FLUID_DBG, "%s: %d: alloc metadata, len = %d", __FILE__, + __LINE__, mf->varlen); + metadata = FLUID_MALLOC(mf->varlen + 1); + + if (metadata == NULL) { + FLUID_LOG(FLUID_PANIC, "Out of memory"); + return FLUID_FAILED; + } + + /* read the data of the message */ + if (fluid_midi_file_read(mf, metadata, mf->varlen) != FLUID_OK) { + FLUID_FREE (metadata); + return FLUID_FAILED; + } + + evt = new_fluid_midi_event(); + if (evt == NULL) { + FLUID_LOG(FLUID_ERR, "Out of memory"); + FLUID_FREE (metadata); + return FLUID_FAILED; + } + + evt->dtime = mf->dtime; + size = mf->varlen; + + if (metadata[mf->varlen - 1] == MIDI_EOX) + size--; + + /* Add SYSEX event and indicate that its dynamically allocated and should be freed with event */ + fluid_midi_event_set_sysex(evt, metadata, size, TRUE); + fluid_track_add_event(track, evt); + mf->dtime = 0; + } + + return FLUID_OK; + + } else if (status == MIDI_META_EVENT) { /* meta events */ + + int result = FLUID_OK; + + /* get the type of the meta message */ + type = fluid_midi_file_getc(mf); + if (type < 0) { + FLUID_LOG(FLUID_ERR, "Unexpected end of file"); + return FLUID_FAILED; + } + + /* get the length of the data part */ + if (fluid_midi_file_read_varlen(mf) != FLUID_OK) { + return FLUID_FAILED; + } + + if (mf->varlen < 255) { + metadata = &static_buf[0]; + } else { + FLUID_LOG(FLUID_DBG, "%s: %d: alloc metadata, len = %d", __FILE__, + __LINE__, mf->varlen); + dyn_buf = FLUID_MALLOC(mf->varlen + 1); + if (dyn_buf == NULL) { + FLUID_LOG(FLUID_PANIC, "Out of memory"); + return FLUID_FAILED; + } + metadata = dyn_buf; + } + + /* read the data */ + if (mf->varlen) { + if (fluid_midi_file_read(mf, metadata, mf->varlen) != FLUID_OK) { + if (dyn_buf) { + FLUID_FREE(dyn_buf); + } + return FLUID_FAILED; + } + } + + /* handle meta data */ + switch (type) { + + case MIDI_COPYRIGHT: + metadata[mf->varlen] = 0; + break; + + case MIDI_TRACK_NAME: + metadata[mf->varlen] = 0; + fluid_track_set_name(track, (char *) metadata); + break; + + case MIDI_INST_NAME: + metadata[mf->varlen] = 0; + break; + + case MIDI_LYRIC: + break; + + case MIDI_MARKER: + break; + + case MIDI_CUE_POINT: + break; /* don't care much for text events */ + + case MIDI_EOT: + if (mf->varlen != 0) { + FLUID_LOG(FLUID_ERR, "Invalid length for EndOfTrack event"); + result = FLUID_FAILED; + break; + } + mf->eot = 1; + evt = 
new_fluid_midi_event(); + if (evt == NULL) { + FLUID_LOG(FLUID_ERR, "Out of memory"); + result = FLUID_FAILED; + break; + } + evt->dtime = mf->dtime; + evt->type = MIDI_EOT; + fluid_track_add_event(track, evt); + mf->dtime = 0; + break; + + case MIDI_SET_TEMPO: + if (mf->varlen != 3) { + FLUID_LOG(FLUID_ERR, + "Invalid length for SetTempo meta event"); + result = FLUID_FAILED; + break; + } + tempo = (metadata[0] << 16) + (metadata[1] << 8) + metadata[2]; + evt = new_fluid_midi_event(); + if (evt == NULL) { + FLUID_LOG(FLUID_ERR, "Out of memory"); + result = FLUID_FAILED; + break; + } + evt->dtime = mf->dtime; + evt->type = MIDI_SET_TEMPO; + evt->channel = 0; + evt->param1 = tempo; + evt->param2 = 0; + fluid_track_add_event(track, evt); + mf->dtime = 0; + break; + + case MIDI_SMPTE_OFFSET: + if (mf->varlen != 5) { + FLUID_LOG(FLUID_ERR, + "Invalid length for SMPTE Offset meta event"); + result = FLUID_FAILED; + break; + } + break; /* we don't use smtp */ + + case MIDI_TIME_SIGNATURE: + if (mf->varlen != 4) { + FLUID_LOG(FLUID_ERR, + "Invalid length for TimeSignature meta event"); + result = FLUID_FAILED; + break; + } + nominator = metadata[0]; + denominator = pow(2.0, (double) metadata[1]); + clocks = metadata[2]; + notes = metadata[3]; + + FLUID_LOG(FLUID_DBG, + "signature=%d/%d, metronome=%d, 32nd-notes=%d", + nominator, denominator, clocks, notes); + + break; + + case MIDI_KEY_SIGNATURE: + if (mf->varlen != 2) { + FLUID_LOG(FLUID_ERR, + "Invalid length for KeySignature meta event"); + result = FLUID_FAILED; + break; + } + /* We don't care about key signatures anyway */ + /* sf = metadata[0]; + mi = metadata[1]; */ + break; + + case MIDI_SEQUENCER_EVENT: + break; + + default: + break; + } + + if (dyn_buf) { + FLUID_LOG(FLUID_DBG, "%s: %d: free metadata", __FILE__, __LINE__); + FLUID_FREE(dyn_buf); + } + + return result; + + } else { /* channel messages */ + + type = status & 0xf0; + channel = status & 0x0f; + + /* all channel message have at least 1 byte of associated data */ + if ((param1 = fluid_midi_file_getc(mf)) < 0) { + FLUID_LOG(FLUID_ERR, "Unexpected end of file"); + return FLUID_FAILED; + } + + switch (type) { + + case NOTE_ON: + if ((param2 = fluid_midi_file_getc(mf)) < 0) { + FLUID_LOG(FLUID_ERR, "Unexpected end of file"); + return FLUID_FAILED; + } + break; + + case NOTE_OFF: + if ((param2 = fluid_midi_file_getc(mf)) < 0) { + FLUID_LOG(FLUID_ERR, "Unexpected end of file"); + return FLUID_FAILED; + } + break; + + case KEY_PRESSURE: + if ((param2 = fluid_midi_file_getc(mf)) < 0) { + FLUID_LOG(FLUID_ERR, "Unexpected end of file"); + return FLUID_FAILED; + } + break; + + case CONTROL_CHANGE: + if ((param2 = fluid_midi_file_getc(mf)) < 0) { + FLUID_LOG(FLUID_ERR, "Unexpected end of file"); + return FLUID_FAILED; + } + break; + + case PROGRAM_CHANGE: + break; + + case CHANNEL_PRESSURE: + break; + + case PITCH_BEND: + if ((param2 = fluid_midi_file_getc(mf)) < 0) { + FLUID_LOG(FLUID_ERR, "Unexpected end of file"); + return FLUID_FAILED; + } + + param1 = ((param2 & 0x7f) << 7) | (param1 & 0x7f); + param2 = 0; + break; + + default: + /* Can't possibly happen !? 
*/ + FLUID_LOG(FLUID_ERR, "Unrecognized MIDI event"); + return FLUID_FAILED; + } + evt = new_fluid_midi_event(); + if (evt == NULL) { + FLUID_LOG(FLUID_ERR, "Out of memory"); + return FLUID_FAILED; + } + evt->dtime = mf->dtime; + evt->type = type; + evt->channel = channel; + evt->param1 = param1; + evt->param2 = param2; + fluid_track_add_event(track, evt); + mf->dtime = 0; + } + return FLUID_OK; +} + +/* + * fluid_midi_file_get_division + */ +int +fluid_midi_file_get_division(fluid_midi_file *midifile) +{ + return midifile->division; +} + +/****************************************************** + * + * fluid_track_t + */ + +/** + * MIDI EVENT DEFINED HERE...do we need to figure out exactly how this ties in to cli-dssi-host next? not yet, for now we need working midi loading from file and listing events according to each time sample. should we write a print event function? yeah in main() custom.c + * Create a MIDI event structure. + * @return New MIDI event structure or NULL when out of memory. + */ +fluid_midi_event_t * +new_fluid_midi_event () +{ + fluid_midi_event_t* evt; //oh it's just the name + evt = FLUID_NEW(fluid_midi_event_t); + if (evt == NULL) { + FLUID_LOG(FLUID_ERR, "Out of memory"); + return NULL; + } + evt->dtime = 0; + evt->type = 0; + evt->channel = 0; + evt->param1 = 0; + evt->param2 = 0; + evt->next = NULL; + evt->paramptr = NULL; + return evt; +} + +/** + * Delete MIDI event structure. + * @param evt MIDI event structure + * @return Always returns #FLUID_OK + */ +int +delete_fluid_midi_event(fluid_midi_event_t *evt) +{ + fluid_midi_event_t *temp; + + while (evt) { + temp = evt->next; + + /* Dynamic SYSEX event? - free (param2 indicates if dynamic) */ + if (evt->type == MIDI_SYSEX && evt->paramptr && evt->param2) + FLUID_FREE (evt->paramptr); + + FLUID_FREE(evt); + evt = temp; + } + return FLUID_OK; +} + +/** + * Get the event type field of a MIDI event structure. + * @param evt MIDI event structure + * @return Event type field (MIDI status byte without channel) + */ +int +fluid_midi_event_get_type(fluid_midi_event_t *evt) +{ + return evt->type; +} + +/** + * Set the event type field of a MIDI event structure. + * @param evt MIDI event structure + * @param type Event type field (MIDI status byte without channel) + * @return Always returns #FLUID_OK + */ +int +fluid_midi_event_set_type(fluid_midi_event_t *evt, int type) +{ + evt->type = type; + return FLUID_OK; +} + +/** + * Get the channel field of a MIDI event structure. + * @param evt MIDI event structure + * @return Channel field + */ +int +fluid_midi_event_get_channel(fluid_midi_event_t *evt) +{ + return evt->channel; +} + +/** + * Set the channel field of a MIDI event structure. + * @param evt MIDI event structure + * @param chan MIDI channel field + * @return Always returns #FLUID_OK + */ +int +fluid_midi_event_set_channel(fluid_midi_event_t *evt, int chan) +{ + evt->channel = chan; + return FLUID_OK; +} + +/** + * Get the key field of a MIDI event structure. + * @param evt MIDI event structure + * @return MIDI note number (0-127) + */ +int +fluid_midi_event_get_key(fluid_midi_event_t *evt) +{ + return evt->param1; +} + +/** + * Set the key field of a MIDI event structure. + * @param evt MIDI event structure + * @param v MIDI note number (0-127) + * @return Always returns #FLUID_OK + */ +int +fluid_midi_event_set_key(fluid_midi_event_t *evt, int v) +{ + evt->param1 = v; + return FLUID_OK; +} + +/** + * Get the velocity field of a MIDI event structure. 
+ * @param evt MIDI event structure + * @return MIDI velocity number (0-127) + */ +int +fluid_midi_event_get_velocity(fluid_midi_event_t *evt) +{ + return evt->param2; +} + +/** + * Set the velocity field of a MIDI event structure. + * @param evt MIDI event structure + * @param v MIDI velocity value + * @return Always returns #FLUID_OK + */ +int +fluid_midi_event_set_velocity(fluid_midi_event_t *evt, int v) +{ + evt->param2 = v; + return FLUID_OK; +} + +/** + * Get the control number of a MIDI event structure. + * @param evt MIDI event structure + * @return MIDI control number + */ +int +fluid_midi_event_get_control(fluid_midi_event_t *evt) +{ + return evt->param1; +} + +/** + * Set the control field of a MIDI event structure. + * @param evt MIDI event structure + * @param v MIDI control number + * @return Always returns #FLUID_OK + */ +int +fluid_midi_event_set_control(fluid_midi_event_t *evt, int v) +{ + evt->param1 = v; + return FLUID_OK; +} + +/** + * Get the value field from a MIDI event structure. + * @param evt MIDI event structure + * @return Value field + */ +int +fluid_midi_event_get_value(fluid_midi_event_t *evt) +{ + return evt->param2; +} + +/** + * Set the value field of a MIDI event structure. + * @param evt MIDI event structure + * @param v Value to assign + * @return Always returns #FLUID_OK + */ +int +fluid_midi_event_set_value(fluid_midi_event_t *evt, int v) +{ + evt->param2 = v; + return FLUID_OK; +} + +/** + * Get the program field of a MIDI event structure. + * @param evt MIDI event structure + * @return MIDI program number (0-127) + */ +int +fluid_midi_event_get_program(fluid_midi_event_t *evt) +{ + return evt->param1; +} + +/** + * Set the program field of a MIDI event structure. + * @param evt MIDI event structure + * @param val MIDI program number (0-127) + * @return Always returns #FLUID_OK + */ +int +fluid_midi_event_set_program(fluid_midi_event_t *evt, int val) +{ + evt->param1 = val; + return FLUID_OK; +} + +/** + * Get the pitch field of a MIDI event structure. + * @param evt MIDI event structure + * @return Pitch value (14 bit value, 0-16383, 8192 is center) + */ +int +fluid_midi_event_get_pitch(fluid_midi_event_t *evt) +{ + return evt->param1; +} + +/** + * Set the pitch field of a MIDI event structure. + * @param evt MIDI event structure + * @param val Pitch value (14 bit value, 0-16383, 8192 is center) + * @return Always returns FLUID_OK + */ +int +fluid_midi_event_set_pitch(fluid_midi_event_t *evt, int val) +{ + evt->param1 = val; + return FLUID_OK; +} + +/** + * Assign sysex data to a MIDI event structure. + * @param evt MIDI event structure + * @param data Pointer to SYSEX data + * @param size Size of SYSEX data + * @param dynamic TRUE if the SYSEX data has been dynamically allocated and + * should be freed when the event is freed (only applies if event gets destroyed + * with delete_fluid_midi_event()) + * @return Always returns #FLUID_OK + * + * NOTE: Unlike the other event assignment functions, this one sets evt->type. 
+ */ +int +fluid_midi_event_set_sysex(fluid_midi_event_t *evt, void *data, int size, int dynamic) +{ + evt->type = MIDI_SYSEX; + evt->paramptr = data; + evt->param1 = size; + evt->param2 = dynamic; + return FLUID_OK; +} + +/****************************************************** + * + * fluid_track_t + */ + +/* + * new_fluid_track + */ +fluid_track_t * +new_fluid_track(int num) +{ + fluid_track_t *track; + track = FLUID_NEW(fluid_track_t); + if (track == NULL) { + return NULL; + } + track->name = NULL; + track->num = num; + track->first = NULL; + track->cur = NULL; + track->last = NULL; + track->ticks = 0; + return track; +} + +/* + * delete_fluid_track + */ +int +delete_fluid_track(fluid_track_t *track) +{ + if (track->name != NULL) { + FLUID_FREE(track->name); + } + if (track->first != NULL) { + delete_fluid_midi_event(track->first); + } + FLUID_FREE(track); + return FLUID_OK; +} + +/* + * fluid_track_set_name + */ +int +fluid_track_set_name(fluid_track_t *track, char *name) +{ + int len; + if (track->name != NULL) { + FLUID_FREE(track->name); + } + if (name == NULL) { + track->name = NULL; + return FLUID_OK; + } + len = FLUID_STRLEN(name); + track->name = FLUID_MALLOC(len + 1); + if (track->name == NULL) { + FLUID_LOG(FLUID_ERR, "Out of memory"); + return FLUID_FAILED; + } + FLUID_STRCPY(track->name, name); + return FLUID_OK; +} + +/* + * fluid_track_get_name + */ +char * +fluid_track_get_name(fluid_track_t *track) +{ + return track->name; +} + +/* + * fluid_track_get_duration + */ +int +fluid_track_get_duration(fluid_track_t *track) +{ + int time = 0; + fluid_midi_event_t *evt = track->first; + while (evt != NULL) { + time += evt->dtime; + evt = evt->next; + } + return time; +} + +/* + * fluid_track_count_events + */ +int +fluid_track_count_events(fluid_track_t *track, int *on, int *off) +{ + fluid_midi_event_t *evt = track->first; + while (evt != NULL) { + if (evt->type == NOTE_ON) { + (*on)++; + } else if (evt->type == NOTE_OFF) { + (*off)++; + } + evt = evt->next; + } + return FLUID_OK; +} + +/* + * fluid_track_add_event + */ +int +fluid_track_add_event(fluid_track_t *track, fluid_midi_event_t *evt) +{ + evt->next = NULL; + if (track->first == NULL) { + track->first = evt; + track->cur = evt; + track->last = evt; + } else { + track->last->next = evt; + track->last = evt; + } + return FLUID_OK; +} + +/* + * fluid_track_first_event + */ +fluid_midi_event_t * +fluid_track_first_event(fluid_track_t *track) +{ + track->cur = track->first; + return track->cur; +} + +/* + * fluid_track_next_event + */ +fluid_midi_event_t * +fluid_track_next_event(fluid_track_t *track) +{ + if (track->cur != NULL) { + track->cur = track->cur->next; + } + return track->cur; +} + +/* + * fluid_track_reset + */ +int +fluid_track_reset(fluid_track_t *track) +{ + track->ticks = 0; + track->cur = track->first; + return FLUID_OK; +} + +/* + * fluid_track_send_events + */ +int +fluid_track_send_events(fluid_track_t *track, + fluid_synth_t *synth, //will the null pointer cause an error here? nope, will work //doesn't use it? here... I guess not + fluid_player_t *player, + unsigned int ticks) +{ + int status = FLUID_OK; + fluid_midi_event_t *event; + + while (1) { + + event = track->cur; + if (event == NULL) { + return status; + } + +//ok it should be printing here, right? well need to check flow again, starting from load_tracks if it reach this function or not +//do we decide now to just listen for note on and note off, and keep track of the two? 
yep +//run_synth also takes velocity, so maybe we should add that as well? i guess so + + if (track->ticks + event->dtime > ticks) { + return status; + } + + track->ticks += event->dtime; + + if (!player || event->type == MIDI_EOT) { + } + else if (event->type == MIDI_SET_TEMPO) { + fluid_player_set_midi_tempo(player, event->param1); + } + else { + if (player->playback_callback) + player->playback_callback(player->playback_userdata, event); + } + + fluid_track_next_event(track); + + } + return status; +} + +/****************************************************** + * + * fluid_player + */ + +/** + * create a new midi player. + * @param synth fluid synthesizer instance to create player for + * @return New MIDI player instance or NULL on error (out of memory) + */ +//ok how do we call new_fluid_player with the filename of the midifile? wmayeb call fluid_player_load() with file name +fluid_player_t * +new_fluid_player(void) +{ + int i; + fluid_player_t *player; + player = FLUID_NEW(fluid_player_t); + if (player == NULL) { + FLUID_LOG(FLUID_ERR, "Out of memory"); + return NULL; + } + player->status = FLUID_PLAYER_READY; + player->loop = 1; + player->ntracks = 0; + for (i = 0; i < MAX_NUMBER_OF_TRACKS; i++) { + player->track[i] = NULL; + } + player->synth = NULL; +// player->system_timer = NULL; +// player->sample_timer = NULL; + player->playlist = NULL; + player->currentfile = NULL; + player->division = 0; + player->send_program_change = 1; + player->miditempo = 480000; + player->deltatime = 4.0; + player->cur_msec = 0; + player->cur_ticks = 0; + // FIXME fluid_player_set_playback_callback(player, fluid_synth_handle_midi_event, synth); + + // player->use_system_timer = fluid_settings_str_equal(synth->settings, + // "player.timing-source", "system"); + + //fluid_settings_getint(synth->settings, "player.reset-synth", &i); + player->reset_synth_between_songs = i; + + return player; +} + +/** + * Delete a MIDI player instance. + * @param player MIDI player instance + * @return Always returns #FLUID_OK + */ +int +delete_fluid_player(fluid_player_t *player) +{ + fluid_list_t *q; + fluid_playlist_item* pi; + + if (player == NULL) { + return FLUID_OK; + } + fluid_player_stop(player); + fluid_player_reset(player); + + while (player->playlist != NULL) { + q = player->playlist->next; + pi = (fluid_playlist_item*) player->playlist->data; + FLUID_FREE(pi->filename); + FLUID_FREE(pi->buffer); + FLUID_FREE(pi); + delete1_fluid_list(player->playlist); + player->playlist = q; + } + + FLUID_FREE(player); + return FLUID_OK; +} + +/** + * Registers settings related to the MIDI player + */ +//void +//fluid_player_settings(fluid_settings_t *settings) +//{ + /* player.timing-source can be either "system" (use system timer) + or "sample" (use timer based on number of written samples) */ + /* + fluid_settings_register_str(settings, "player.timing-source", "sample", 0, + NULL, NULL); + fluid_settings_add_option(settings, "player.timing-source", "sample"); + fluid_settings_add_option(settings, "player.timing-source", "system"); + + Selects whether the player should reset the synth between songs, or not. 
+ fluid_settings_register_int(settings, "player.reset-synth", 1, 0, 1, + FLUID_HINT_TOGGLED, NULL, NULL); +*/ +//} + + +int +fluid_player_reset(fluid_player_t *player) +{ + int i; + + for (i = 0; i < MAX_NUMBER_OF_TRACKS; i++) { + if (player->track[i] != NULL) { + delete_fluid_track(player->track[i]); + player->track[i] = NULL; + } + } + /* player->current_file = NULL; */ + /* player->status = FLUID_PLAYER_READY; */ + /* player->loop = 1; */ + player->ntracks = 0; + player->division = 0; + player->send_program_change = 1; + player->miditempo = 480000; + player->deltatime = 4.0; + return 0; +} + +/* + * fluid_player_add_track + */ +int +fluid_player_add_track(fluid_player_t *player, fluid_track_t *track) +{ + if (player->ntracks < MAX_NUMBER_OF_TRACKS) { + player->track[player->ntracks++] = track; + return FLUID_OK; + } else { + return FLUID_FAILED; + } +} + +/* + * fluid_player_count_tracks + */ +int +fluid_player_count_tracks(fluid_player_t *player) +{ + return player->ntracks; +} + +/* + * fluid_player_get_track + */ +fluid_track_t * +fluid_player_get_track(fluid_player_t *player, int i) +{ + if ((i >= 0) && (i < MAX_NUMBER_OF_TRACKS)) { + return player->track[i]; + } else { + return NULL; + } +} + +/** + * Change the MIDI callback function. This is usually set to + * fluid_synth_handle_midi_event, but can optionally be changed + * to a user-defined function instead, for intercepting all MIDI + * messages sent to the synth. You can also use a midi router as + * the callback function to modify the MIDI messages before sending + * them to the synth. + * @param player MIDI player instance + * @param handler Pointer to callback function + * @param handler_data Parameter sent to the callback function + * @returns FLUID_OK + * @since 1.1.4 + */ +int +fluid_player_set_playback_callback(fluid_player_t* player, + handle_midi_event_func_t handler, void* handler_data) +{ + player->playback_callback = handler; + player->playback_userdata = handler_data; + return FLUID_OK; +} + +/** + * Add a MIDI file to a player queue. + * @param player MIDI player instance + * @param midifile File name of the MIDI file to add + * @return #FLUID_OK or #FLUID_FAILED + */ +int +fluid_player_add(fluid_player_t *player, const char *midifile) +{ + fluid_playlist_item *pi = FLUID_MALLOC(sizeof(fluid_playlist_item)); + char* f = FLUID_STRDUP(midifile); + if (!pi || !f) { + FLUID_FREE(pi); + FLUID_FREE(f); + FLUID_LOG(FLUID_PANIC, "Out of memory"); + return FLUID_FAILED; + } + + pi->filename = f; + pi->buffer = NULL; + pi->buffer_len = 0; + player->playlist = fluid_list_append(player->playlist, pi); + return FLUID_OK; +} + +/** + * Add a MIDI file to a player queue, from a buffer in memory. + * @param player MIDI player instance + * @param buffer Pointer to memory containing the bytes of a complete MIDI + * file. The data is copied, so the caller may free or modify it immediately + * without affecting the playlist. + * @param len Length of the buffer, in bytes. + * @return #FLUID_OK or #FLUID_FAILED + */ +int +fluid_player_add_mem(fluid_player_t* player, const void *buffer, size_t len) +{ + /* Take a copy of the buffer, so the caller can free immediately. 
*/ + fluid_playlist_item *pi = FLUID_MALLOC(sizeof(fluid_playlist_item)); + void *buf_copy = FLUID_MALLOC(len); + if (!pi || !buf_copy) { + FLUID_FREE(pi); + FLUID_FREE(buf_copy); + FLUID_LOG(FLUID_PANIC, "Out of memory"); + return FLUID_FAILED; + } + + FLUID_MEMCPY(buf_copy, buffer, len); + pi->filename = NULL; + pi->buffer = buf_copy; + pi->buffer_len = len; + player->playlist = fluid_list_append(player->playlist, pi); + return FLUID_OK; +} + +/* + * fluid_player_load + */ +int +fluid_player_load(fluid_player_t *player, fluid_playlist_item *item) +{ + puts("inside fluid player load"); + //ok do you think this is the function we need to check? well i found it, this long printf i uncommented wasn't used, we need to use fluid_track_send_events inside loop over loaded tracks to actually send events into our callback. ok + fluid_midi_file *midifile; + char* buffer; + size_t buffer_length; + int buffer_owned; + if (item->filename != NULL) + { + fluid_file fp; + /* This file is specified by filename; load the file from disk */ + FLUID_LOG(FLUID_DBG, "%s: %d: Loading midifile %s", __FILE__, __LINE__, + item->filename); + /* Read the entire contents of the file into the buffer */ + fp = FLUID_FOPEN(item->filename, "rb"); + if (fp == NULL) { + FLUID_LOG(FLUID_ERR, "Couldn't open the MIDI file"); + return FLUID_FAILED; + } + buffer = fluid_file_read_full(fp, &buffer_length); + if (buffer == NULL) + { + FLUID_FCLOSE(fp); + return FLUID_FAILED; + } + buffer_owned = 1; + FLUID_FCLOSE(fp); + } + else + { + /* This file is specified by a pre-loaded buffer; load from memory */ + FLUID_LOG(FLUID_DBG, "%s: %d: Loading midifile from memory (%p)", + __FILE__, __LINE__, item->buffer); + buffer = (char *) item->buffer; + buffer_length = item->buffer_len; + /* Do not free the buffer (it is owned by the playlist) */ + buffer_owned = 0; + } +// here file + midifile = new_fluid_midi_file(buffer, buffer_length); + if (midifile == NULL) { + if (buffer_owned) { + FLUID_FREE(buffer); + } + return FLUID_FAILED; + } + player->division = fluid_midi_file_get_division(midifile); + //DIVISION SET HERE + fluid_player_set_midi_tempo(player, player->miditempo); // Update deltatime + /*FLUID_LOG(FLUID_DBG, "quarter note division=%d\n", player->division); */ +// here it load tracks + if (fluid_midi_file_load_tracks(midifile, player) != FLUID_OK) { + if (buffer_owned) { + FLUID_FREE(buffer); + } + delete_fluid_midi_file(midifile); + return FLUID_FAILED; + } + delete_fluid_midi_file(midifile); + if (buffer_owned) { + FLUID_FREE(buffer); + } + return FLUID_OK; +} + +void +fluid_player_advancefile(fluid_player_t *player) +{ + if (player->playlist == NULL) { + return; /* No files to play */ + } + if (player->currentfile != NULL) { + player->currentfile = fluid_list_next(player->currentfile); + } + if (player->currentfile == NULL) { + if (player->loop == 0) { + return; /* We're done playing */ + } + if (player->loop > 0) { + player->loop--; + } + player->currentfile = player->playlist; + } +} + +void +fluid_player_playlist_load(fluid_player_t *player, unsigned int msec) +{ + fluid_playlist_item* current_playitem; + int i; + + do { + fluid_player_advancefile(player); + if (player->currentfile == NULL) { + /* Failed to find next song, probably since we're finished */ + player->status = FLUID_PLAYER_DONE; + return; + } + + fluid_player_reset(player); + current_playitem = (fluid_playlist_item *) player->currentfile->data; + } while (fluid_player_load(player, current_playitem) != FLUID_OK); + + /* Successfully loaded midi file */ + + 
player->begin_msec = msec; + player->start_msec = msec; + player->start_ticks = 0; + player->cur_ticks = 0; + +// if (player->reset_synth_between_songs) { +// fluid_synth_system_reset(player->synth); +// } + + for (i = 0; i < player->ntracks; i++) { + if (player->track[i] != NULL) { + fluid_track_reset(player->track[i]); + } + } +} + + +/* + * fluid_player_callback + */ +//ok so I guess it all starts here? not sure, new_fluid_player starts a new player struct, then we need fluid_player_load(player, playlist_item) +//and playlist_item have filename of file to load got it +int +fluid_player_callback(void *data, unsigned int msec) +{ + int i; + int loadnextfile; + int status = FLUID_PLAYER_DONE; + fluid_player_t *player; + fluid_synth_t *synth; + player = (fluid_player_t *) data; + synth = player->synth; + + loadnextfile = player->currentfile == NULL ? 1 : 0; + do { + if (loadnextfile) { + loadnextfile = 0; + fluid_player_playlist_load(player, msec); + if (player->currentfile == NULL) { + return 0; + } + } + + player->cur_msec = msec; + player->cur_ticks = (player->start_ticks + + (int) ((double) (player->cur_msec - player->start_msec) + / player->deltatime)); + + for (i = 0; i < player->ntracks; i++) { + if (!fluid_track_eot(player->track[i])) { + status = FLUID_PLAYER_PLAYING; + if (fluid_track_send_events(player->track[i], synth, player, + player->cur_ticks) != FLUID_OK) { + /* */ + } + } + } + + if (status == FLUID_PLAYER_DONE) { + FLUID_LOG(FLUID_DBG, "%s: %d: Duration=%.3f sec", __FILE__, + __LINE__, (msec - player->begin_msec) / 1000.0); + loadnextfile = 1; + } + } while (loadnextfile); + + player->status = status; + + return 1; +} + +/** + * Activates play mode for a MIDI player if not already playing. + * @param player MIDI player instance + * @return #FLUID_OK on success, #FLUID_FAILED otherwise + */ +int +fluid_player_play(fluid_player_t *player) +{ + if (player->status == FLUID_PLAYER_PLAYING) { + return FLUID_OK; + } + + if (player->playlist == NULL) { + return FLUID_OK; + } + + player->status = FLUID_PLAYER_PLAYING; + +/* + if (player->use_system_timer) { + player->system_timer = new_fluid_timer((int) player->deltatime, + fluid_player_callback, (void *) player, TRUE, FALSE, TRUE); + if (player->system_timer == NULL) { + return FLUID_FAILED; + } + } else { + player->sample_timer = new_fluid_sample_timer(player->synth, + fluid_player_callback, (void *) player); + + if (player->sample_timer == NULL) { + return FLUID_FAILED; + } + } + */ + return FLUID_OK; +} + +/** + * Stops a MIDI player. + * @param player MIDI player instance + * @return Always returns #FLUID_OK + */ +int +fluid_player_stop(fluid_player_t *player) +{ +/* if (player->system_timer != NULL) { + delete_fluid_timer(player->system_timer); + } + if (player->sample_timer != NULL) { + delete_fluid_sample_timer(player->synth, player->sample_timer); + }*/ + player->status = FLUID_PLAYER_DONE; +// player->sample_timer = NULL; + // player->system_timer = NULL; + return FLUID_OK; +} + +/** + * Get MIDI player status. + * @param player MIDI player instance + * @return Player status (#fluid_player_status) + * @since 1.1.0 + */ +int +fluid_player_get_status(fluid_player_t *player) +{ + return player->status; +} + +/** + * Enable looping of a MIDI player + * @param player MIDI player instance + * @param loop Times left to loop the playlist. -1 means loop infinitely. 
+ * @return Always returns #FLUID_OK + * @since 1.1.0 + * + * For example, if you want to loop the playlist twice, set loop to 2 + * and call this function before you start the player. + */ +int fluid_player_set_loop(fluid_player_t *player, int loop) +{ + player->loop = loop; + return FLUID_OK; +} + +/** + * Set the tempo of a MIDI player. + * @param player MIDI player instance + * @param tempo Tempo to set playback speed to (in microseconds per quarter note, as per MIDI file spec) + * @return Always returns #FLUID_OK + */ +int fluid_player_set_midi_tempo(fluid_player_t *player, int tempo) +{ + player->miditempo = tempo; + //DIVISION CALCULATION MADE HERE + //USE THIS TO FIX MRS WATSON + player->deltatime = (double) tempo / player->division / 1000.0; /* in milliseconds */ + player->start_msec = player->cur_msec; + player->start_ticks = player->cur_ticks; + + FLUID_LOG(FLUID_DBG, + "tempo=%d, tick time=%f msec, cur time=%d msec, cur tick=%d", + tempo, player->deltatime, player->cur_msec, player->cur_ticks); +//so this would be player->cur_msec? yes + return FLUID_OK; +} + +/** + * Set the tempo of a MIDI player in beats per minute. + * @param player MIDI player instance + * @param bpm Tempo in beats per minute + * @return Always returns #FLUID_OK + */ +int +fluid_player_set_bpm(fluid_player_t *player, int bpm) +{ + return fluid_player_set_midi_tempo(player, (int) ((double) 60 * 1e6 / bpm)); +} + +/** + * Wait for a MIDI player to terminate (when done playing). + * @param player MIDI player instance + * @return #FLUID_OK on success, #FLUID_FAILED otherwise + */ +/* +int +fluid_player_join(fluid_player_t *player) +{ + if (player->system_timer) { + return fluid_timer_join(player->system_timer); + } else if (player->sample_timer) { + while (player->status != FLUID_PLAYER_DONE) { +#if defined(WIN32) + Sleep(10); +#else + usleep(10000); +#endif + } + } + return FLUID_OK; +} +*/ +/************************************************************************ + * MIDI PARSER + * + */ + +/* + * new_fluid_midi_parser + */ +fluid_midi_parser_t * +new_fluid_midi_parser () +{ + fluid_midi_parser_t *parser; + parser = FLUID_NEW(fluid_midi_parser_t); + if (parser == NULL) { + FLUID_LOG(FLUID_ERR, "Out of memory"); + return NULL; + } + parser->status = 0; /* As long as the status is 0, the parser won't do anything -> no need to initialize all the fields. */ + return parser; +} + +/* + * delete_fluid_midi_parser + */ +int +delete_fluid_midi_parser(fluid_midi_parser_t *parser) +{ + FLUID_FREE(parser); + return FLUID_OK; +} + +/** + * Parse a MIDI stream one character at a time. + * @param parser Parser instance + * @param c Next character in MIDI stream + * @return A parsed MIDI event or NULL if none. Event is internal and should + * not be modified or freed and is only valid until next call to this function. + */ +fluid_midi_event_t * +fluid_midi_parser_parse(fluid_midi_parser_t *parser, unsigned char c) +{ + fluid_midi_event_t *event; + + /* Real-time messages (0xF8-0xFF) can occur anywhere, even in the middle + * of another message. */ + if (c >= 0xF8) { + if (c == MIDI_SYSTEM_RESET) { + parser->event.type = c; + parser->status = 0; /* clear the status */ + return &parser->event; + } + + return NULL; + } + + /* Status byte? - If previous message not yet complete, it is discarded (re-sync). 
*/ + if (c & 0x80) { + /* Any status byte terminates SYSEX messages (not just 0xF7) */ + if (parser->status == MIDI_SYSEX && parser->nr_bytes > 0) { + event = &parser->event; + fluid_midi_event_set_sysex(event, parser->data, parser->nr_bytes, + FALSE); + } else + event = NULL; + + if (c < 0xF0) /* Voice category message? */ + { + parser->channel = c & 0x0F; + parser->status = c & 0xF0; + + /* The event consumes x bytes of data... (subtract 1 for the status byte) */ + parser->nr_bytes_total = fluid_midi_event_length(parser->status) + - 1; + + parser->nr_bytes = 0; /* 0 bytes read so far */ + } else if (c == MIDI_SYSEX) { + parser->status = MIDI_SYSEX; + parser->nr_bytes = 0; + } else + parser->status = 0; /* Discard other system messages (0xF1-0xF7) */ + + return event; /* Return SYSEX event or NULL */ + } + + /* Data/parameter byte */ + + /* Discard data bytes for events we don't care about */ + if (parser->status == 0) + return NULL; + + /* Max data size exceeded? (SYSEX messages only really) */ + if (parser->nr_bytes == FLUID_MIDI_PARSER_MAX_DATA_SIZE) { + parser->status = 0; /* Discard the rest of the message */ + return NULL; + } + + /* Store next byte */ + parser->data[parser->nr_bytes++] = c; + + /* Do we still need more data to get this event complete? */ + if (parser->nr_bytes < parser->nr_bytes_total) + return NULL; + + /* Event is complete, return it. + * Running status byte MIDI feature is also handled here. */ + parser->event.type = parser->status; + parser->event.channel = parser->channel; + parser->nr_bytes = 0; /* Reset data size, in case there are additional running status messages */ + + switch (parser->status) { + case NOTE_OFF: + case NOTE_ON: + case KEY_PRESSURE: + case CONTROL_CHANGE: + case PROGRAM_CHANGE: + case CHANNEL_PRESSURE: + parser->event.param1 = parser->data[0]; /* For example key number */ + parser->event.param2 = parser->data[1]; /* For example velocity */ + break; + case PITCH_BEND: + /* Pitch-bend is transmitted with 14-bit precision. */ + parser->event.param1 = (parser->data[1] << 7) | parser->data[0]; + break; + default: /* Unlikely */ + return NULL; + } + + return &parser->event; +} + +/* Purpose: + * Returns the length of a MIDI message. */ +static int +fluid_midi_event_length(unsigned char event) +{ + switch (event & 0xF0) { + case NOTE_OFF: + case NOTE_ON: + case KEY_PRESSURE: + case CONTROL_CHANGE: + case PITCH_BEND: + return 3; + case PROGRAM_CHANGE: + case CHANNEL_PRESSURE: + return 2; + } + switch (event) { + case MIDI_TIME_CODE: + case MIDI_SONG_SELECT: + case 0xF4: + case 0xF5: + return 2; + case MIDI_TUNE_REQUEST: + return 1; + case MIDI_SONG_POSITION: + return 3; + } + return 1; +} + + diff --git a/src/midi/fluid_midi.h b/src/midi/fluid_midi.h new file mode 100644 index 0000000..608ba55 --- /dev/null +++ b/src/midi/fluid_midi.h @@ -0,0 +1,389 @@ +/* FluidSynth - A Software Synthesizer + * + * Copyright (C) 2003 Peter Hanappe and others. + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public License + * as published by the Free Software Foundation; either version 2 of + * the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. 
+ * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the Free + * Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA + * 02110-1301, USA + */ + +#ifndef _FLUID_MIDI_H +#define _FLUID_MIDI_H + +#include "fluidsynth_priv.h" +//#include "fluid_sys.h" +//#include "custom.h" +#include "fluid_list.h" +#include <stdlib.h> +#include <stdio.h> +#include <stdint.h> + +typedef struct _fluid_midi_parser_t fluid_midi_parser_t; + +fluid_midi_parser_t* new_fluid_midi_parser(void); +int delete_fluid_midi_parser(fluid_midi_parser_t* parser); +fluid_midi_event_t* fluid_midi_parser_parse(fluid_midi_parser_t* parser, unsigned char c); + +fluid_midi_event_t *new_fluid_midi_event (void); + + +/*************************************************************** + * + * CONSTANTS & ENUM + */ + + +#define MAX_NUMBER_OF_TRACKS 128 + +enum fluid_midi_event_type { + /* channel messages */ + NOTE_OFF = 0x80, + NOTE_ON = 0x90, + KEY_PRESSURE = 0xa0, + CONTROL_CHANGE = 0xb0, + PROGRAM_CHANGE = 0xc0, + CHANNEL_PRESSURE = 0xd0, + PITCH_BEND = 0xe0, + /* system exclusive */ + MIDI_SYSEX = 0xf0, + /* system common - never in midi files */ + MIDI_TIME_CODE = 0xf1, + MIDI_SONG_POSITION = 0xf2, + MIDI_SONG_SELECT = 0xf3, + MIDI_TUNE_REQUEST = 0xf6, + MIDI_EOX = 0xf7, + /* system real-time - never in midi files */ + MIDI_SYNC = 0xf8, + MIDI_TICK = 0xf9, + MIDI_START = 0xfa, + MIDI_CONTINUE = 0xfb, + MIDI_STOP = 0xfc, + MIDI_ACTIVE_SENSING = 0xfe, + MIDI_SYSTEM_RESET = 0xff, + /* meta event - for midi files only */ + MIDI_META_EVENT = 0xff +}; + +enum fluid_midi_control_change { + BANK_SELECT_MSB = 0x00, + MODULATION_MSB = 0x01, + BREATH_MSB = 0x02, + FOOT_MSB = 0x04, + PORTAMENTO_TIME_MSB = 0x05, + DATA_ENTRY_MSB = 0x06, + VOLUME_MSB = 0x07, + BALANCE_MSB = 0x08, + PAN_MSB = 0x0A, + EXPRESSION_MSB = 0x0B, + EFFECTS1_MSB = 0x0C, + EFFECTS2_MSB = 0x0D, + GPC1_MSB = 0x10, /* general purpose controller */ + GPC2_MSB = 0x11, + GPC3_MSB = 0x12, + GPC4_MSB = 0x13, + BANK_SELECT_LSB = 0x20, + MODULATION_WHEEL_LSB = 0x21, + BREATH_LSB = 0x22, + FOOT_LSB = 0x24, + PORTAMENTO_TIME_LSB = 0x25, + DATA_ENTRY_LSB = 0x26, + VOLUME_LSB = 0x27, + BALANCE_LSB = 0x28, + PAN_LSB = 0x2A, + EXPRESSION_LSB = 0x2B, + EFFECTS1_LSB = 0x2C, + EFFECTS2_LSB = 0x2D, + GPC1_LSB = 0x30, + GPC2_LSB = 0x31, + GPC3_LSB = 0x32, + GPC4_LSB = 0x33, + SUSTAIN_SWITCH = 0x40, + PORTAMENTO_SWITCH = 0x41, + SOSTENUTO_SWITCH = 0x42, + SOFT_PEDAL_SWITCH = 0x43, + LEGATO_SWITCH = 0x45, + HOLD2_SWITCH = 0x45, + SOUND_CTRL1 = 0x46, + SOUND_CTRL2 = 0x47, + SOUND_CTRL3 = 0x48, + SOUND_CTRL4 = 0x49, + SOUND_CTRL5 = 0x4A, + SOUND_CTRL6 = 0x4B, + SOUND_CTRL7 = 0x4C, + SOUND_CTRL8 = 0x4D, + SOUND_CTRL9 = 0x4E, + SOUND_CTRL10 = 0x4F, + GPC5 = 0x50, + GPC6 = 0x51, + GPC7 = 0x52, + GPC8 = 0x53, + PORTAMENTO_CTRL = 0x54, + EFFECTS_DEPTH1 = 0x5B, + EFFECTS_DEPTH2 = 0x5C, + EFFECTS_DEPTH3 = 0x5D, + EFFECTS_DEPTH4 = 0x5E, + EFFECTS_DEPTH5 = 0x5F, + DATA_ENTRY_INCR = 0x60, + DATA_ENTRY_DECR = 0x61, + NRPN_LSB = 0x62, + NRPN_MSB = 0x63, + RPN_LSB = 0x64, + RPN_MSB = 0x65, + ALL_SOUND_OFF = 0x78, + ALL_CTRL_OFF = 0x79, + LOCAL_CONTROL = 0x7A, + ALL_NOTES_OFF = 0x7B, + OMNI_OFF = 0x7C, + OMNI_ON = 0x7D, + POLY_OFF = 0x7E, + POLY_ON = 0x7F +}; + +/* General MIDI RPN event numbers (LSB, MSB = 0) */ +enum midi_rpn_event { + RPN_PITCH_BEND_RANGE = 0x00, + RPN_CHANNEL_FINE_TUNE = 0x01, + RPN_CHANNEL_COARSE_TUNE = 0x02, + RPN_TUNING_PROGRAM_CHANGE = 0x03, + RPN_TUNING_BANK_SELECT = 0x04, + 
RPN_MODULATION_DEPTH_RANGE = 0x05 +}; + +enum midi_meta_event { + MIDI_COPYRIGHT = 0x02, + MIDI_TRACK_NAME = 0x03, + MIDI_INST_NAME = 0x04, + MIDI_LYRIC = 0x05, + MIDI_MARKER = 0x06, + MIDI_CUE_POINT = 0x07, + MIDI_EOT = 0x2f, + MIDI_SET_TEMPO = 0x51, + MIDI_SMPTE_OFFSET = 0x54, + MIDI_TIME_SIGNATURE = 0x58, + MIDI_KEY_SIGNATURE = 0x59, + MIDI_SEQUENCER_EVENT = 0x7f +}; + +/* MIDI SYSEX useful manufacturer values */ +enum midi_sysex_manuf { + MIDI_SYSEX_MANUF_ROLAND = 0x41, /**< Roland manufacturer ID */ + MIDI_SYSEX_UNIV_NON_REALTIME = 0x7E, /**< Universal non realtime message */ + MIDI_SYSEX_UNIV_REALTIME = 0x7F /**< Universal realtime message */ +}; + +#define MIDI_SYSEX_DEVICE_ID_ALL 0x7F /**< Device ID used in SYSEX messages to indicate all devices */ + +/* SYSEX sub-ID #1 which follows device ID */ +#define MIDI_SYSEX_MIDI_TUNING_ID 0x08 /**< Sysex sub-ID #1 for MIDI tuning messages */ +#define MIDI_SYSEX_GM_ID 0x09 /**< Sysex sub-ID #1 for General MIDI messages */ + +/** + * SYSEX tuning message IDs. + */ +enum midi_sysex_tuning_msg_id { + MIDI_SYSEX_TUNING_BULK_DUMP_REQ = 0x00, /**< Bulk tuning dump request (non-realtime) */ + MIDI_SYSEX_TUNING_BULK_DUMP = 0x01, /**< Bulk tuning dump response (non-realtime) */ + MIDI_SYSEX_TUNING_NOTE_TUNE = 0x02, /**< Tuning note change message (realtime) */ + MIDI_SYSEX_TUNING_BULK_DUMP_REQ_BANK = 0x03, /**< Bulk tuning dump request (with bank, non-realtime) */ + MIDI_SYSEX_TUNING_BULK_DUMP_BANK = 0x04, /**< Bulk tuning dump resonse (with bank, non-realtime) */ + MIDI_SYSEX_TUNING_OCTAVE_DUMP_1BYTE = 0x05, /**< Octave tuning dump using 1 byte values (non-realtime) */ + MIDI_SYSEX_TUNING_OCTAVE_DUMP_2BYTE = 0x06, /**< Octave tuning dump using 2 byte values (non-realtime) */ + MIDI_SYSEX_TUNING_NOTE_TUNE_BANK = 0x07, /**< Tuning note change message (with bank, realtime/non-realtime) */ + MIDI_SYSEX_TUNING_OCTAVE_TUNE_1BYTE = 0x08, /**< Octave tuning message using 1 byte values (realtime/non-realtime) */ + MIDI_SYSEX_TUNING_OCTAVE_TUNE_2BYTE = 0x09 /**< Octave tuning message using 2 byte values (realtime/non-realtime) */ +}; + +/* General MIDI sub-ID #2 */ +#define MIDI_SYSEX_GM_ON 0x01 /**< Enable GM mode */ +#define MIDI_SYSEX_GM_OFF 0x02 /**< Disable GM mode */ + +enum fluid_driver_status +{ + FLUID_MIDI_READY, + FLUID_MIDI_LISTENING, + FLUID_MIDI_DONE +}; + +/*************************************************************** + * + * TYPE DEFINITIONS & FUNCTION DECLARATIONS + */ + +/* From ctype.h */ +#define fluid_isascii(c) (((c) & ~0x7f) == 0) + + + +/* + * fluid_midi_event_t + */ +struct _fluid_midi_event_t { + fluid_midi_event_t* next; /* Link to next event */ + void *paramptr; /* Pointer parameter (for SYSEX data), size is stored to param1, param2 indicates if pointer should be freed (dynamic if TRUE) */ + unsigned int dtime; /* Delay (ticks) between this and previous event. midi tracks. 
*/ + unsigned int param1; /* First parameter */ + unsigned int param2; /* Second parameter */ + unsigned char type; /* MIDI event type */ + unsigned char channel; /* MIDI channel */ +}; + + +/* + * fluid_track_t + */ +struct _fluid_track_t { + char* name; + int num; + fluid_midi_event_t *first; + fluid_midi_event_t *cur; + fluid_midi_event_t *last; + unsigned int ticks; +}; + +typedef struct _fluid_track_t fluid_track_t; + +fluid_track_t* new_fluid_track(int num); +int delete_fluid_track(fluid_track_t* track); +int fluid_track_set_name(fluid_track_t* track, char* name); +char* fluid_track_get_name(fluid_track_t* track); +int fluid_track_add_event(fluid_track_t* track, fluid_midi_event_t* evt); +fluid_midi_event_t* fluid_track_first_event(fluid_track_t* track); +fluid_midi_event_t* fluid_track_next_event(fluid_track_t* track); +int fluid_track_get_duration(fluid_track_t* track); +int fluid_track_reset(fluid_track_t* track); + +int fluid_track_send_events(fluid_track_t* track, + fluid_synth_t* synth, + fluid_player_t* player, + unsigned int ticks); + +#define fluid_track_eot(track) ((track)->cur == NULL) + + +/** + * fluid_playlist_item + * Used as the `data' elements of the fluid_player.playlist. + * Represents either a filename or a pre-loaded memory buffer. + * Exactly one of `filename' and `buffer' is non-NULL. + */ +typedef struct +{ + char* filename; /** Name of file (owned); NULL if data pre-loaded */ + void* buffer; /** The MIDI file data (owned); NULL if filename */ + size_t buffer_len; /** Number of bytes in buffer; 0 if filename */ +} fluid_playlist_item; + +/* + * fluid_player + */ +struct _fluid_player_t { + int status; + int ntracks; + fluid_track_t *track[MAX_NUMBER_OF_TRACKS]; + fluid_synth_t* synth; + fluid_timer_t* system_timer; + fluid_sample_timer_t* sample_timer; + + int loop; /* -1 = loop infinitely, otherwise times left to loop the playlist */ + fluid_list_t* playlist; /* List of fluid_playlist_item* objects */ + fluid_list_t* currentfile; /* points to an item in files, or NULL if not playing */ + + char send_program_change; /* should we ignore the program changes? */ + char use_system_timer; /* if zero, use sample timers, otherwise use system clock timer */ + char reset_synth_between_songs; /* 1 if system reset should be sent to the synth between songs. */ + int start_ticks; /* the number of tempo ticks passed at the last tempo change */ + int cur_ticks; /* the number of tempo ticks passed */ + int begin_msec; /* the time (msec) of the beginning of the file */ + int start_msec; /* the start time of the last tempo change */ + int cur_msec; /* the current time */ + int miditempo; /* as indicated by MIDI SetTempo: n 24th of a usec per midi-clock. bravo! */ + double deltatime; /* milliseconds per midi tick. depends on set-tempo */ + unsigned int division; + + handle_midi_event_func_t playback_callback; /* function fired on each midi event as it is played */ + void* playback_userdata; /* pointer to user-defined data passed to playback_callback function */ +}; + +int fluid_player_add_track(fluid_player_t* player, fluid_track_t* track); +int fluid_player_callback(void* data, unsigned int msec); +int fluid_player_count_tracks(fluid_player_t* player); +fluid_track_t* fluid_player_get_track(fluid_player_t* player, int i); +int fluid_player_reset(fluid_player_t* player); +int fluid_player_load(fluid_player_t* player, fluid_playlist_item *item); + +//void fluid_player_settings(fluid_settings_t* settings); + + +/* + * fluid_midi_file + */ +//hmm no events though here, right? 
looks so +typedef struct { + const char* buffer; /* Entire contents of MIDI file (borrowed) */ + int buf_len; /* Length of buffer, in bytes */ + int buf_pos; /* Current read position in contents buffer */ + int eof; /* The "end of file" condition */ + int running_status; + int c; + int type; + int ntracks; + int uses_smpte; + unsigned int smpte_fps; + unsigned int smpte_res; + unsigned int division; /* If uses_SMPTE == 0 then division is + ticks per beat (quarter-note) */ + double tempo; /* Beats per second (SI rules =) */ + int tracklen; + int trackpos; + int eot; + int varlen; + int dtime; +} fluid_midi_file; + +fluid_midi_file* new_fluid_midi_file(const char* buffer, size_t length); +void delete_fluid_midi_file(fluid_midi_file* mf); +int fluid_midi_file_read_mthd(fluid_midi_file* midifile); +int fluid_midi_file_load_tracks(fluid_midi_file* midifile, fluid_player_t* player); +int fluid_midi_file_read_track(fluid_midi_file* mf, fluid_player_t* player, int num); +int fluid_midi_file_read_event(fluid_midi_file* mf, fluid_track_t* track); +int fluid_midi_file_read_varlen(fluid_midi_file* mf); +int fluid_midi_file_getc(fluid_midi_file* mf); +int fluid_midi_file_push(fluid_midi_file* mf, int c); +int fluid_midi_file_read(fluid_midi_file* mf, void* buf, int len); +int fluid_midi_file_skip(fluid_midi_file* mf, int len); +int fluid_midi_file_eof(fluid_midi_file* mf); +int fluid_midi_file_read_tracklen(fluid_midi_file* mf); +int fluid_midi_file_eot(fluid_midi_file* mf); +int fluid_midi_file_get_division(fluid_midi_file* midifile); + + +#define FLUID_MIDI_PARSER_MAX_DATA_SIZE 1024 /**< Maximum size of MIDI parameters/data (largest is SYSEX data) */ + +/* + * fluid_midi_parser_t + */ +struct _fluid_midi_parser_t { + unsigned char status; /* Identifies the type of event, that is currently received ('Noteon', 'Pitch Bend' etc). */ + unsigned char channel; /* The channel of the event that is received (in case of a channel event) */ + unsigned int nr_bytes; /* How many bytes have been read for the current event? */ + unsigned int nr_bytes_total; /* How many bytes does the current event type include? */ + unsigned char data[FLUID_MIDI_PARSER_MAX_DATA_SIZE]; /* The parameters or SYSEX data */ + fluid_midi_event_t event; /* The event, that is returned to the MIDI driver. */ +}; + +int fluid_isasciistring(char* s); +long fluid_getlength(unsigned char *s); + + +#endif /* _FLUID_MIDI_H */ diff --git a/src/midi/fluid_midi.lo b/src/midi/fluid_midi.lo new file mode 100644 index 0000000..09c84c0 --- /dev/null +++ b/src/midi/fluid_midi.lo @@ -0,0 +1,12 @@ +# fluid_midi.lo - a libtool object file +# Generated by libtool (GNU libtool) 2.4.2 +# +# Please DO NOT delete this file! +# It is necessary for linking the library. + +# Name of the PIC object. 
+pic_object='.libs/fluid_midi.o' + +# Name of the non-PIC object +non_pic_object='fluid_midi.o' + diff --git a/src/midi/fluid_midi.o b/src/midi/fluid_midi.o Binary files differnew file mode 100644 index 0000000..4460e8d --- /dev/null +++ b/src/midi/fluid_midi.o diff --git a/src/midi/fluid_midi_custom.h b/src/midi/fluid_midi_custom.h new file mode 100644 index 0000000..8712b44 --- /dev/null +++ b/src/midi/fluid_midi_custom.h @@ -0,0 +1,23 @@ +typedef struct _fluid_list_t fluid_list_t; + +typedef struct _fluid_midi_event_t fluid_midi_event_t; +typedef struct _fluid_player_t fluid_player_t; +typedef struct _fluid_track_t fluid_track_t; +typedef struct _fluid_synth_t fluid_synth_t; +typedef struct _fluid_timer_t fluid_timer_t; +typedef struct _fluid_sample_timer_t fluid_sample_timer_t; +typedef struct _fluid_settings_t fluid_settings_t; + + +#define FLUID_DBG 0 +#define FLUID_OK 1 +#define FLUID_ERR 0 +#define FLUID_FAILED 2 +#define FLUID_PANIC 3 +#define FLUID_PLAYER_DONE 2 +#define FLUID_PLAYER_PLAYING 1 +#define FLUID_PLAYER_READY 3 +#define FLUID_HINT_TOGGLED 0x01 +#define TRUE 1 +#define FALSE 0 +typedef int (*handle_midi_event_func_t)(void* data, fluid_midi_event_t* event); diff --git a/src/midi/fluidsynth_priv.h b/src/midi/fluidsynth_priv.h new file mode 100644 index 0000000..93f6d32 --- /dev/null +++ b/src/midi/fluidsynth_priv.h @@ -0,0 +1,281 @@ +/* FluidSynth - A Software Synthesizer + * + * Copyright (C) 2003 Peter Hanappe and others. + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public License + * as published by the Free Software Foundation; either version 2 of + * the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. 
+ * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the Free + * Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA + * 02110-1301, USA + */ + + +#ifndef _FLUIDSYNTH_PRIV_H +#define _FLUIDSYNTH_PRIV_H + +#include "fluid_midi_custom.h" +#include "midi_loader.h" +//#include <glib.h> +#include <stdint.h> +#include <stdio.h> +#include <stdlib.h> +#include <string.h> + +#if HAVE_CONFIG_H +#include "config.h" +#endif + +#if defined(__POWERPC__) && !(defined(__APPLE__) && defined(__MACH__)) +#include "config_maxmsp43.h" +#endif + +#if defined(WIN32) && !defined(MINGW32) +#include "config_win32.h" +#endif + +#if HAVE_STRING_H +#include <string.h> +#endif + +#if HAVE_STDLIB_H +#include <stdlib.h> +#endif + +#if HAVE_STDIO_H +#include <stdio.h> +#endif + +#if HAVE_MATH_H +#include <math.h> +#endif + +#if HAVE_ERRNO_H +#include <errno.h> +#endif + +#if HAVE_STDARG_H +#include <stdarg.h> +#endif + +#if HAVE_UNISTD_H +#include <unistd.h> +#endif + +#if HAVE_FCNTL_H +#include <fcntl.h> +#endif + +#if HAVE_SYS_MMAN_H +#include <sys/mman.h> +#endif + +#if HAVE_SYS_TYPES_H +#include <sys/types.h> +#endif + +#if HAVE_SYS_STAT_H +#include <sys/stat.h> +#endif + +#if HAVE_SYS_TIME_H +#include <sys/time.h> +#endif + +#if HAVE_SYS_SOCKET_H +#include <sys/socket.h> +#endif + +#if HAVE_NETINET_IN_H +#include <netinet/in.h> +#endif + +#if HAVE_NETINET_TCP_H +#include <netinet/tcp.h> +#endif + +#if HAVE_ARPA_INET_H +#include <arpa/inet.h> +#endif + +#if HAVE_LIMITS_H +#include <limits.h> +#endif + +#if HAVE_PTHREAD_H +#include <pthread.h> +#endif + +#if HAVE_IO_H +#include <io.h> +#endif + +#if HAVE_WINDOWS_H +#include <winsock2.h> +#include <ws2tcpip.h> +#include <windows.h> +#endif + +/* MinGW32 special defines */ +#ifdef MINGW32 + +#include <stdint.h> +#define snprintf _snprintf +#define vsnprintf _vsnprintf + +#define DSOUND_SUPPORT 1 +#define WINMIDI_SUPPORT 1 +#define STDIN_FILENO 0 +#define STDOUT_FILENO 1 +#define STDERR_FILENO 2 + +#endif + +/* Darwin special defines (taken from config_macosx.h) */ +#ifdef DARWIN +#define MACINTOSH +#define __Types__ +#define WITHOUT_SERVER 1 +#endif + + +//#include "fluidsynth.h" + + +/*************************************************************** + * + * BASIC TYPES + */ + +#if defined(WITH_FLOAT) +typedef float fluid_real_t; +#else +typedef double fluid_real_t; +#endif + + +#if defined(WIN32) +typedef SOCKET fluid_socket_t; +#else +typedef int fluid_socket_t; +#define INVALID_SOCKET -1 +#endif + +#if defined(SUPPORTS_VLA) +# define FLUID_DECLARE_VLA(_type, _name, _len) \ + _type _name[_len] +#else +# define FLUID_DECLARE_VLA(_type, _name, _len) \ + _type* _name = g_newa(_type, (_len)) +#endif + + +/** Integer types */ +//typedef int8_t sint8_t; +typedef uint8_t uint8_t; +//typedef int16 sint16; +//typedef uint16 uint16; +typedef int32_t sint32_t; +typedef uint32_t uint32_t; +//typedef int64 sint64; +//typedef uint64 uint64; + + +/*************************************************************** + * + * FORWARD DECLARATIONS + */ +typedef struct _fluid_env_data_t fluid_env_data_t; +typedef struct _fluid_adriver_definition_t fluid_adriver_definition_t; +typedef struct _fluid_channel_t fluid_channel_t; +typedef struct _fluid_tuning_t fluid_tuning_t; +typedef struct _fluid_hashtable_t fluid_hashtable_t; +typedef struct _fluid_client_t fluid_client_t; +typedef struct _fluid_server_socket_t fluid_server_socket_t; +typedef struct _fluid_sample_timer_t fluid_sample_timer_t; + 
+/*************************************************************** + * + * CONSTANTS + */ + +#define FLUID_BUFSIZE 64 /**< FluidSynth internal buffer size (in samples) */ +#define FLUID_MAX_EVENTS_PER_BUFSIZE 1024 /**< Maximum queued MIDI events per #FLUID_BUFSIZE */ +#define FLUID_MAX_RETURN_EVENTS 1024 /**< Maximum queued synthesis thread return events */ +#define FLUID_MAX_EVENT_QUEUES 16 /**< Maximum number of unique threads queuing events */ +#define FLUID_DEFAULT_AUDIO_RT_PRIO 60 /**< Default setting for audio.realtime-prio */ +#define FLUID_DEFAULT_MIDI_RT_PRIO 50 /**< Default setting for midi.realtime-prio */ + +#ifndef PI +#define PI 3.141592654 +#endif + +/*************************************************************** + * + * SYSTEM INTERFACE + */ +typedef FILE* fluid_file; + +#define FLUID_MALLOC(_n) malloc(_n) +#define FLUID_REALLOC(_p,_n) realloc(_p,_n) +#define FLUID_NEW(_t) (_t*)malloc(sizeof(_t)) +#define FLUID_ARRAY(_t,_n) (_t*)malloc((_n)*sizeof(_t)) +#define FLUID_FREE(_p) free(_p) +#define FLUID_FOPEN(_f,_m) fopen(_f,_m) +#define FLUID_FCLOSE(_f) fclose(_f) +#define FLUID_FREAD(_p,_s,_n,_f) fread(_p,_s,_n,_f) +#define FLUID_FSEEK(_f,_n,_set) fseek(_f,_n,_set) +#define FLUID_MEMCPY(_dst,_src,_n) memcpy(_dst,_src,_n) +#define FLUID_MEMSET(_s,_c,_n) memset(_s,_c,_n) +#define FLUID_STRLEN(_s) strlen(_s) +#define FLUID_STRCMP(_s,_t) strcmp(_s,_t) +#define FLUID_STRNCMP(_s,_t,_n) strncmp(_s,_t,_n) +#define FLUID_STRCPY(_dst,_src) strcpy(_dst,_src) +#define FLUID_STRNCPY(_dst,_src,_n) strncpy(_dst,_src,_n) +#define FLUID_STRCHR(_s,_c) strchr(_s,_c) +#define FLUID_STRRCHR(_s,_c) strrchr(_s,_c) +#ifdef strdup +#define FLUID_STRDUP(s) strdup(s) +#else +#define FLUID_STRDUP(s) FLUID_STRCPY(FLUID_MALLOC(FLUID_STRLEN(s) + 1), s) +#endif +#define FLUID_SPRINTF sprintf +#define FLUID_FPRINTF fprintf + +#define fluid_clip(_val, _min, _max) \ +{ (_val) = ((_val) < (_min))? (_min) : (((_val) > (_max))? (_max) : (_val)); } + +#if WITH_FTS +#define FLUID_PRINTF post +#define FLUID_FLUSH() +#else +#define FLUID_PRINTF printf +#define FLUID_FLUSH() fflush(stdout) +#endif + +#define FLUID_LOG fluid_log + +#ifndef M_PI +#define M_PI 3.1415926535897932384626433832795 +#endif + + +#define FLUID_ASSERT(a,b) +#define FLUID_ASSERT_P(a,b) + +char* fluid_error(void); + + +/* Internationalization */ +#define _(s) s + + +#endif /* _FLUIDSYNTH_PRIV_H */ diff --git a/src/midi/libfluidmidi.la b/src/midi/libfluidmidi.la new file mode 100644 index 0000000..8da7fe7 --- /dev/null +++ b/src/midi/libfluidmidi.la @@ -0,0 +1,41 @@ +# libfluidmidi.la - a libtool library file +# Generated by libtool (GNU libtool) 2.4.2 +# +# Please DO NOT delete this file! +# It is necessary for linking the library. + +# The name that we can dlopen(3). +dlname='' + +# Names of this library. +library_names='' + +# The name of the static archive. +old_library='libfluidmidi.a' + +# Linker flags that can not go in dependency_libs. +inherited_linker_flags='' + +# Libraries that this one depends upon. +dependency_libs='' + +# Names of additional weak libraries provided by this library +weak_library_names='' + +# Version information for libfluidmidi. +current= +age= +revision= + +# Is this an already installed library? +installed=no + +# Should we warn about portability when linking against -modules? 
+shouldnotlink=no + +# Files to dlopen/dlpreopen +dlopen='' +dlpreopen='' + +# Directory that this library needs to be installed in: +libdir='' diff --git a/src/midi/midi_loader.c b/src/midi/midi_loader.c new file mode 100644 index 0000000..2cdd539 --- /dev/null +++ b/src/midi/midi_loader.c @@ -0,0 +1,71 @@ +#include "midi_loader.h"
+
+//static int event_count = 0;
+size_t last_msec = 0;
+size_t nmsecs_since_last = 0;
+
+void print_event(fluid_midi_event_t *event, size_t current_msec){
+// {{{ DESCRIPTION
+// fluid_midi_event_t* next; /* Link to next event */
+// void *paramptr; /* Pointer parameter (for SYSEX data), size is stored to param1, param2 indicates if pointer should be freed (dynamic if TRUE) */
+// unsigned int dtime; /* Delay (ticks) between this and previous event. midi tracks. */
+// unsigned int param1; /* First parameter */
+// unsigned int param2; /* Second parameter */
+// unsigned char type; /* MIDI event type */
+// unsigned char channel; /* MIDI channel */
+//}}}
+// printf("EVENT_COUNT: %d\n", event_count);
+ printf("dtime:%u ", event->dtime);
+ printf("param1:%u ", event->param1);
+ printf("param2: %u ", event->param2);
+ printf("type: %x ", event->type);
+ printf("channel: %u ", event->channel);
+    printf("current_msec: %zu ", current_msec);  /* size_t needs %zu; label matches the variable */
+ puts("\n");
+}
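The arithmetic in get_events() below hinges on one fact from fluid_midi.h: player->deltatime is milliseconds per MIDI tick, and the file-scope last_msec / nmsecs_since_last above carry state between calls. A minimal sketch of the two conversions this code relies on, tick to millisecond and millisecond to sample frame; the helper names and the fixed 44100 Hz rate are illustrative, not part of the source:

/* Sketch only: illustrative helpers, not part of midi_loader.c. */
static size_t ticks_to_msec(const fluid_player_t *player, unsigned int ticks)
{
    /* deltatime is milliseconds per MIDI tick (see fluid_midi.h) */
    return (size_t)(player->deltatime * ticks);
}

static size_t msec_to_frames(size_t msec, size_t sample_rate)
{
    /* e.g. sample_rate = 44100, as hard-coded in midi_loader_test.c */
    return msec * sample_rate / 1000;
}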
+
+int get_events(void *data, fluid_midi_event_t *event){
+    // Playback callback: the player invokes this once for every MIDI event delivered by fluid_track_send_events().
+ read_midi_ctx_t *ctx = (read_midi_ctx_t *)data;
+ fluid_player_t *player = ctx->player;
+ fluid_track_t *track = ctx->track;
+ read_midi_callback cb = ctx->callback;
+ size_t current_msec;
+
+// event_count++;
+ current_msec = (player->deltatime * track->ticks);
+ nmsecs_since_last = current_msec - last_msec;
+ last_msec = current_msec;
+
+// print_event(event, current_msec);
+ //process_midi_cb execution...
+    // Hand the event to the user callback together with the time elapsed since the previous event.
+    cb(event, nmsecs_since_last, ctx->callback_userdata);
+
+    return FLUID_OK;
+}
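For reference, a caller-side function matching the read_midi_callback typedef from midi_loader.h could look like the sketch below. The name note_printer_cb and the printed format are illustrative; it reads the raw param1/param2 fields exactly as print_event() does.

/* Sketch only: a hypothetical user callback for load_midi_file(). */
static void note_printer_cb(fluid_midi_event_t *event, size_t msecs_since_last, void *userdata)
{
    (void)userdata;  /* unused in this sketch */
    if (event->type == NOTE_ON) {
        printf("+%zu ms NOTE_ON  key=%u vel=%u chan=%u\n",
               msecs_since_last, event->param1, event->param2,
               (unsigned int)event->channel);
    } else if (event->type == NOTE_OFF) {
        printf("+%zu ms NOTE_OFF key=%u chan=%u\n",
               msecs_since_last, event->param1,
               (unsigned int)event->channel);
    }
}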
+
+void load_midi_file(char *filename, read_midi_callback callback, void *callback_userdata){
+    fluid_playlist_item playlist_item;
+    playlist_item.filename = filename;
+    playlist_item.buffer = NULL;      /* keep the documented invariant: exactly one of filename/buffer is set */
+    playlist_item.buffer_len = 0;
+
+ fluid_player_t *player;
+ player = (fluid_player_t *)new_fluid_player();
+ player->playback_callback = &get_events;
+
+
+ read_midi_ctx_t ctx;
+ ctx.player = player;
+ ctx.callback = callback;
+ ctx.callback_userdata = callback_userdata;
+
+ player->playback_userdata = (void *)&ctx;
+ fluid_player_load(player, &playlist_item);
+
+ int i;
+ for(i = 0; i < player->ntracks; i++){
+        ctx.track = player->track[i]; // a single MIDI file can contain several tracks
+        // (unsigned int)-1 is the largest possible tick count, so the whole track is drained in one call
+        fluid_track_send_events(player->track[i], player->synth, player, (unsigned int)-1);
+ }
+
+ delete_fluid_player(player);
+}
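A usage sketch for load_midi_file(): the main() wrapper below is editorial, not part of this file, and reuses the hypothetical note_printer_cb from the earlier sketch.

/* Sketch only: drive the loader from the command line. */
int main(int argc, char **argv)
{
    if (argc < 2) {
        fprintf(stderr, "usage: %s <file.mid>\n", argv[0]);
        return 1;
    }
    load_midi_file(argv[1], note_printer_cb, NULL);
    return 0;
}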
diff --git a/src/midi/midi_loader.h b/src/midi/midi_loader.h new file mode 100644 index 0000000..57cee25 --- /dev/null +++ b/src/midi/midi_loader.h @@ -0,0 +1,46 @@ +#ifndef MIDI_LOADER_H +#define MIDI_LOADER_H + +#include "fluid_list.h" +#include "fluidsynth_priv.h" //is this the right idea? just need to load libs in the right order? yep +#include "fluid_midi.h" +#include <sndfile.h> +#include <string.h> +#include <ladspa.h> +#include <dssi.h> +//load all of those in the same way as below? or is the problem with how fluid_midi is accessing this file? yeah basically order issue + + +typedef struct event_table_t{ + snd_seq_event_t *events; + size_t length; + size_t last_nframe; + size_t nframes_since_last; +} event_table_t; + +//typedef void(*read_midi_callback)(event_table_t *event_table, void *userdata); +typedef void(*read_midi_callback)(fluid_midi_event_t *event, size_t msecs_since_last, void *userdata); + + + +typedef struct read_midi_ctx_t { + fluid_player_t *player; + fluid_track_t *track; + read_midi_callback callback; + void *callback_userdata; +} read_midi_ctx_t; + + +//void insert_event(event_table_t *event_table, snd_seq_event_t *event){ +//void delete_event(event_table_t *event_table, snd_seq_event_t *event){ +//int compare_events(snd_seq_event_t *event1, snd_seq_event_t *event2){ +//void replace_events(event_table_t *event_table, snd_seq_event_t *event){ +//like this? +//void convert_event_format(fluid_midi_event_t *from, snd_seq_event_t *to); +//void print_snd_seq_event(snd_seq_event_t *event); +//void print_event_table (event_table_t *event_table); +//int get_events(void *data, fluid_midi_event_t *event); + +void print_snd_seq_event(snd_seq_event_t *event); +void load_midi_file(char *filename, read_midi_callback callback, void *callback_userdata); +#endif diff --git a/src/midi/midi_loader_test.c b/src/midi/midi_loader_test.c new file mode 100644 index 0000000..d79392d --- /dev/null +++ b/src/midi/midi_loader_test.c @@ -0,0 +1,240 @@ +#include "midi_loader.h" + +event_table_t *event_table; + +//{{{ TO REMOVE...event table functions +void insert_event(event_table_t *event_table, snd_seq_event_t *event){ + //inserts an event into the event table + event_table->events = realloc(event_table->events, (event_table->length + 1) * sizeof(snd_seq_event_t)); + memcpy(&event_table->events[event_table->length], event, sizeof(snd_seq_event_t)); + event_table->length += 1; +} + +void delete_event(event_table_t *event_table, snd_seq_event_t *event){ + //deletes an event in the event table + size_t i; + for (i=0; i< event_table->length; i++){ + if(compare_events(&event_table->events[i], event)){ + printf("removed_event\n"); + memcpy(&event_table->events[i], &event_table->events[i+1], sizeof(snd_seq_event_t)*(event_table->length - i -1)); + event_table->events = realloc(event_table->events, event_table->length * sizeof(snd_seq_event_t)); + event_table->length--; + i--; + } + } +} + +void delete_note_off_events(event_table_t *event_table){ + //removes note_off events after they have happened from the event table + size_t i; + size_t i; + for (i=0; i< event_table->length; i++){ + if(event_table->events[i].type == SND_SEQ_EVENT_NOTEOFF){ + printf("removed_note_off_event\n"); + memcpy(&event_table->events[i], &event_table->events[i+1], sizeof(snd_seq_event_t)*(event_table->length - i -1)); + event_table->events = realloc(event_table->events, event_table->length * sizeof(snd_seq_event_t)); + event_table->length--; + i--; + } + } +} + +int compare_events(snd_seq_event_t *event1, snd_seq_event_t 
*event2){ + //compares events in the event table + return ( + (event1->data.note.note == event2->data.note.note) && + (event1->data.note.channel == event2->data.note.channel) + ) ? 1 : 0; +} + + +void replace_events(event_table_t *event_table, snd_seq_event_t *event){ + //replaces events in the event table + size_t i; + for (i=0; i< event_table->length; i++){ + if(compare_events(&event_table->events[i], event)){ + printf("replaced_event\n"); + memcpy(&event_table->events[i], event, sizeof(snd_seq_event_t)); + } + } +} + +void print_event_table (event_table_t *event_table){ + unsigned int i; + for(i=0; i< event_table->length; i++){ + printf(" - %d: ", i + 1); + print_snd_seq_event(&event_table->events[i]); + + } + printf("--\n"); +} +//}}} + +void convert_event_format(fluid_midi_event_t *from, snd_seq_event_t *to){ + memset(to, 0, sizeof(snd_seq_event_t)); +//{{{ from->type + switch(from->type){ + + case NOTE_ON: + to->type = SND_SEQ_EVENT_NOTEON; + to->data.note.note = from->param1; + to->data.note.velocity = from->param2; + break; + case NOTE_OFF: + to->type = SND_SEQ_EVENT_NOTEOFF; + to->data.note.note = from->param1; + to->data.note.off_velocity = from->param2; + break; + /*case FLUID_NONE: to->type = SND_SEQ_EVENT_SYSTEM; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_RESULT; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_NOTE; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_KEYPRESS; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_CONTROLLER; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_PGMCHANGE; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_CHANPRESS; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_PITCHBEND; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_CONTROL14; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_NONREGPARAM; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_REGPARAM; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_SONGPOS; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_SONGSEL; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_QFRAME; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_TIMESIGN; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_KEYSIGN; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_START; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_CONTINUE; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_STOP; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_SETPOS_TICK; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_SETPOS_TIME; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_TEMPO; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_CLOCK; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_TICK; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_QUEUE_SKEW; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_SYNC_POS; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_TUNE_REQUEST; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_RESET; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_SENSING; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_ECHO; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_OSS; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_CLIENT_START; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_CLIENT_EXIT; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_CLIENT_CHANGE; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_PORT_START; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_PORT_EXIT; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_PORT_CHANGE; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_PORT_SUBSCRIBED; break; + case FLUID_NONE: to->type = 
SND_SEQ_EVENT_PORT_UNSUBSCRIBED; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_USR0; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_USR1; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_USR2; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_USR3; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_USR4; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_USR5; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_USR6; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_USR7; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_USR8; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_USR9; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_SYSEX; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_BOUNCE; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_USR_VAR0; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_USR_VAR1; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_USR_VAR2; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_USR_VAR3; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_USR_VAR4; break; + case FLUID_NONE: to->type = SND_SEQ_EVENT_NONE; break; +*/ + } +//}}} + //to->data.note.channel = from->channel; + to->data.note.channel = 0; // FIXME force channel + to->time.tick = 0; +} + +void print_snd_seq_event(snd_seq_event_t *event){ + char note_event[20]; + switch(event->type){ + + case SND_SEQ_EVENT_NOTEON: + strcpy(note_event,"NOTE_ON"); + break; + case SND_SEQ_EVENT_NOTEOFF: + strcpy(note_event,"NOTE_OFF"); + break; + break; + } + printf("event_type: %s", note_event); + printf("channel: %d ", event->data.note.channel); + printf("note: %d ", event->data.note.note); + printf("velocity: %d ", event->data.note.velocity); + printf("tick: %d ", event->time.tick); + printf("\n"); +} + + +int get_events(void *data, fluid_midi_event_t *event){ + read_midi_ctx_t *ctx = (read_midi_ctx_t *)data; + fluid_player_t *player = ctx->player; + fluid_track_t *track = ctx->track; + snd_seq_event_t seq_event; + + size_t last_nframe = event_table->last_nframe; + event_table->last_nframe = (player->deltatime * track->ticks) * 44100 / 1000; // FIXME 44100 to ctx->samplerate + event_table->nframes_since_last = event_table->last_nframe - last_nframe; + + convert_event_format(event, &seq_event); + + read_midi_callback cb = ctx->callback; + if(cb){ + cb(event_table, ctx->callback_userdata); + } + delete_note_off_events(event_table); + + switch(event->type){ + case NOTE_ON: + insert_event(event_table, &seq_event); + break; + case NOTE_OFF: + replace_events(event_table, &seq_event); + break; + default: + break; + } +#define DEBUG_MIDI 0 + + if(DEBUG_MIDI){ + printf("event table last nframe: %u\n", event_table->last_nframe); + printf("run_synth(instancehandle, %u,\n", event_table->nframes_since_last); + print_event_table(event_table); + printf(", %u)\n", event_table->length); + } + +} + + +void load_midi_file(char *filename, read_midi_callback callback, void *callback_userdata){ + int i; + fluid_player_t *player; + fluid_playlist_item playlist_item; + read_midi_ctx_t ctx; + + event_table = malloc(sizeof (event_table_t)); + event_table->events = NULL; + event_table->length = 0; + event_table->last_nframe = 0; + event_table->nframes_since_last = 0; + playlist_item.filename = filename; + player = (fluid_player_t *)new_fluid_player(); + player->playback_callback = &get_events; + player->playback_userdata = (void *)&ctx; + ctx.player = player; + ctx.callback = callback; + ctx.callback_userdata = callback_userdata; + fluid_player_load(player, &playlist_item); + + for(i = 0; i < player->ntracks; i++){ 
+ ctx.track = player->track[i]; + fluid_track_send_events(player->track[i], player->synth, player, (unsigned int)-1); + } + + delete_fluid_player(player); +} diff --git a/src/state.c b/src/state.c new file mode 100644 index 0000000..5afbb24 --- /dev/null +++ b/src/state.c @@ -0,0 +1,224 @@ +/* + Copyright 2007-2014 David Robillard <http://drobilla.net> + + Permission to use, copy, modify, and/or distribute this software for any + purpose with or without fee is hereby granted, provided that the above + copyright notice and this permission notice appear in all copies. + + THIS SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +*/ + +#define _POSIX_C_SOURCE 200112L /* for fileno */ +#define _BSD_SOURCE 1 /* for lockf */ + +#include <assert.h> +#include <stdio.h> +#include <stdlib.h> +#include <string.h> +#include <errno.h> +#include <unistd.h> +#include <sys/stat.h> +#include <sys/types.h> + +#ifdef HAVE_LV2_STATE +# include "lv2/lv2plug.in/ns/ext/state/state.h" +#endif + +#include "lilv/lilv.h" + +#include "jalv_config.h" +#include "LV2-render_internal.h" + +#define NS_JALV "http://drobilla.net/ns/jalv#" +#define NS_RDF "http://www.w3.org/1999/02/22-rdf-syntax-ns#" +#define NS_RDFS "http://www.w3.org/2000/01/rdf-schema#" +#define NS_XSD "http://www.w3.org/2001/XMLSchema#" + +#define USTR(s) ((const uint8_t*)s) + +char* +jalv_make_path(LV2_State_Make_Path_Handle handle, + const char* path) +{ + Jalv* jalv = (Jalv*)handle; + + // Create in save directory if saving, otherwise use temp directory + const char* dir = (jalv->save_dir) ? 
jalv->save_dir : jalv->temp_dir; + + char* fullpath = jalv_strjoin(dir, path); + fprintf(stderr, "MAKE PATH `%s' => `%s'\n", path, fullpath); + + return fullpath; +} + +static const void* +get_port_value(const char* port_symbol, + void* user_data, + uint32_t* size, + uint32_t* type) +{ + Jalv* jalv = (Jalv*)user_data; + struct Port* port = jalv_port_by_symbol(jalv, port_symbol); + if (port && port->flow == FLOW_INPUT && port->type == TYPE_CONTROL) { + *size = sizeof(float); + *type = jalv->forge.Float; + return &port->control; + } + *size = *type = 0; + return NULL; +} + +void +jalv_save(Jalv* jalv, const char* dir) +{ + jalv->save_dir = jalv_strjoin(dir, "/"); + + LilvState* const state = lilv_state_new_from_instance( + jalv->plugin, jalv->instance, &jalv->map, + jalv->temp_dir, dir, dir, dir, + get_port_value, jalv, + LV2_STATE_IS_POD|LV2_STATE_IS_PORTABLE, NULL); + + lilv_state_save(jalv->world, &jalv->map, &jalv->unmap, state, NULL, + dir, "state.ttl"); + + lilv_state_free(state); + + free(jalv->save_dir); + jalv->save_dir = NULL; +} + +int +jalv_load_presets(Jalv* jalv, PresetSink sink, void* data) +{ + LilvNodes* presets = lilv_plugin_get_related(jalv->plugin, + jalv->nodes.pset_Preset); + LILV_FOREACH(nodes, i, presets) { + const LilvNode* preset = lilv_nodes_get(presets, i); + printf("Preset: %s\n", lilv_node_as_uri(preset)); + lilv_world_load_resource(jalv->world, preset); + LilvNodes* labels = lilv_world_find_nodes( + jalv->world, preset, jalv->nodes.rdfs_label, NULL); + if (labels) { + const LilvNode* label = lilv_nodes_get_first(labels); + sink(jalv, preset, label, data); + lilv_nodes_free(labels); + } else { + fprintf(stderr, "Preset <%s> has no rdfs:label\n", + lilv_node_as_string(lilv_nodes_get(presets, i))); + } + } + lilv_nodes_free(presets); + + return 0; +} + +int +jalv_unload_presets(Jalv* jalv) +{ + LilvNodes* presets = lilv_plugin_get_related(jalv->plugin, + jalv->nodes.pset_Preset); + LILV_FOREACH(nodes, i, presets) { + const LilvNode* preset = lilv_nodes_get(presets, i); + lilv_world_unload_resource(jalv->world, preset); + } + lilv_nodes_free(presets); + + return 0; +} + +static void +set_port_value(const char* port_symbol, + void* user_data, + const void* value, + uint32_t size, + uint32_t type) +{ + Jalv* jalv = (Jalv*)user_data; + struct Port* port = jalv_port_by_symbol(jalv, port_symbol); + if (!port) { + fprintf(stderr, "error: Preset port `%s' is missing\n", port_symbol); + return; + } + + float fvalue; + if (type == jalv->forge.Float) { + fvalue = *(const float*)value; + } else if (type == jalv->forge.Double) { + fvalue = *(const double*)value; + } else if (type == jalv->forge.Int) { + fvalue = *(const int32_t*)value; + } else if (type == jalv->forge.Long) { + fvalue = *(const int64_t*)value; + } else { + fprintf(stderr, "error: Preset `%s' value has bad type <%s>\n", + port_symbol, jalv->unmap.unmap(jalv->unmap.handle, type)); + return; + } + + if (jalv->play_state != JALV_RUNNING) { + // Set value on port struct directly + port->control = fvalue; + } + +} + +void +jalv_apply_state(Jalv* jalv, LilvState* state) +{ + if (state) { + const bool must_pause = (jalv->play_state == JALV_RUNNING); + if (must_pause) { + jalv->play_state = JALV_PAUSE_REQUESTED; + zix_sem_wait(&jalv->paused); + } + + lilv_state_restore( + state, jalv->instance, set_port_value, jalv, 0, NULL); + + if (must_pause) { + jalv->play_state = JALV_RUNNING; + } + } +} + +int +jalv_apply_preset(Jalv* jalv, const LilvNode* preset) +{ + LilvState* state = lilv_state_new_from_world( + jalv->world, 
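        /* builds a LilvState from the preset's description, which is typically
           loaded into the world model beforehand (see jalv_load_presets() above);
           jalv_apply_state() then pushes its values through set_port_value() */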
&jalv->map, preset); + jalv_apply_state(jalv, state); + lilv_state_free(state); + return 0; +} + +int +jalv_save_preset(Jalv* jalv, + const char* dir, + const char* uri, + const char* label, + const char* filename) +{ + LilvState* const state = lilv_state_new_from_instance( + jalv->plugin, jalv->instance, &jalv->map, + jalv->temp_dir, dir, dir, dir, + get_port_value, jalv, + LV2_STATE_IS_POD|LV2_STATE_IS_PORTABLE, NULL); + + if (label) { + lilv_state_set_label(state, label); + } + + int ret = lilv_state_save( + jalv->world, &jalv->map, &jalv->unmap, state, uri, dir, filename); + + lilv_state_free(state); + + return ret; +} diff --git a/src/symap.c b/src/symap.c new file mode 100644 index 0000000..40c8980 --- /dev/null +++ b/src/symap.c @@ -0,0 +1,231 @@ +/* + Copyright 2011-2014 David Robillard <http://drobilla.net> + + Permission to use, copy, modify, and/or distribute this software for any + purpose with or without fee is hereby granted, provided that the above + copyright notice and this permission notice appear in all copies. + + THIS SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +*/ + +#include <assert.h> +#include <stdbool.h> +#include <stdlib.h> +#include <string.h> + +#include "symap.h" + +/** + @file symap.c Implementation of Symap, a basic symbol map (string interner). + + This implementation is primitive, but has some desirable qualities: good + (O(lg(n)) lookup performance for already-mapped symbols, minimal space + overhead, extremely fast (O(1)) reverse mapping (ID to string), simple code, + no dependencies. + + The tradeoff is that mapping new symbols may be quite slow. In other words, + this implementation is ideal for use cases with a relatively limited set of + symbols, or where most symbols are mapped early. It will not fare so well + with very dynamic sets of symbols. For that, you're better off with a + tree-based implementation (and the associated space cost, especially if you + need reverse mapping). +*/ + +struct SymapImpl { + /** + Unsorted array of strings, such that the symbol for ID i is found + at symbols[i - 1]. + */ + char** symbols; + + /** + Array of IDs, sorted by corresponding string in `symbols`. + */ + uint32_t* index; + + /** + Number of symbols (number of items in `symbols` and `index`). + */ + uint32_t size; +}; + +Symap* +symap_new(void) +{ + Symap* map = (Symap*)malloc(sizeof(Symap)); + map->symbols = NULL; + map->index = NULL; + map->size = 0; + return map; +} + +void +symap_free(Symap* map) +{ + for (uint32_t i = 0; i < map->size; ++i) { + free(map->symbols[i]); + } + + free(map->symbols); + free(map->index); + free(map); +} + +static char* +symap_strdup(const char* str) +{ + const size_t len = strlen(str); + char* copy = (char*)malloc(len + 1); + memcpy(copy, str, len + 1); + return copy; +} + +/** + Return the index into map->index (not the ID) corresponding to `sym`, + or the index where a new entry for `sym` should be inserted. 
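  A concrete illustration (added by the editor, not in the original header): after
  symap_map(map, "b") and then symap_map(map, "a"), symbols is {"b", "a"} and the
  sorted index is {2, 1}.  Searching for "a" returns position 0 with *exact set,
  and the caller reads the ID as map->index[0] == 2; searching for "c" returns
  position 2, the append slot, because "c" sorts after every mapped string.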
+*/ +static uint32_t +symap_search(const Symap* map, const char* sym, bool* exact) +{ + *exact = false; + if (map->size == 0) { + return 0; // Empty map, insert at 0 + } else if (strcmp(map->symbols[map->index[map->size - 1] - 1], sym) < 0) { + return map->size; // Greater than last element, append + } + + uint32_t lower = 0; + uint32_t upper = map->size - 1; + uint32_t i = upper; + int cmp; + + while (upper >= lower) { + i = lower + ((upper - lower) / 2); + cmp = strcmp(map->symbols[map->index[i] - 1], sym); + + if (cmp == 0) { + *exact = true; + return i; + } else if (cmp > 0) { + if (i == 0) { + break; // Avoid underflow + } + upper = i - 1; + } else { + lower = ++i; + } + } + + assert(!*exact || strcmp(map->symbols[map->index[i] - 1], sym) > 0); + return i; +} + +uint32_t +symap_try_map(Symap* map, const char* sym) +{ + bool exact; + const uint32_t index = symap_search(map, sym, &exact); + if (exact) { + assert(!strcmp(map->symbols[map->index[index]], sym)); + return map->index[index]; + } + + return 0; +} + +uint32_t +symap_map(Symap* map, const char* sym) +{ + bool exact; + const uint32_t index = symap_search(map, sym, &exact); + if (exact) { + assert(!strcmp(map->symbols[map->index[index] - 1], sym)); + return map->index[index]; + } + + const uint32_t id = ++map->size; + char* const str = symap_strdup(sym); + + /* Append new symbol to symbols array */ + map->symbols = (char**)realloc(map->symbols, map->size * sizeof(str)); + map->symbols[id - 1] = str; + + /* Insert new index element into sorted index */ + map->index = (uint32_t*)realloc(map->index, map->size * sizeof(uint32_t)); + if (index < map->size - 1) { + memmove(map->index + index + 1, + map->index + index, + (map->size - index - 1) * sizeof(uint32_t)); + } + + map->index[index] = id; + + return id; +} + +const char* +symap_unmap(Symap* map, uint32_t id) +{ + if (id == 0) { + return NULL; + } else if (id <= map->size) { + return map->symbols[id - 1]; + } + return NULL; +} + +#ifdef STANDALONE + +#include <stdio.h> + +static void +symap_dump(Symap* map) +{ + fprintf(stderr, "{\n"); + for (uint32_t i = 0; i < map->size; ++i) { + fprintf(stderr, "\t%u = %s\n", + map->index[i], map->symbols[map->index[i] - 1]); + } + fprintf(stderr, "}\n"); +} + +int +main() +{ + #define N_SYMS 5 + char* syms[N_SYMS] = { + "hello", "bonjour", "goodbye", "aloha", "salut" + }; + + Symap* map = symap_new(); + for (int i = 0; i < N_SYMS; ++i) { + if (symap_try_map(map, syms[i])) { + fprintf(stderr, "error: Symbol already mapped\n"); + return 1; + } + + const uint32_t id = symap_map(map, syms[i]); + if (strcmp(map->symbols[id - 1], syms[i])) { + fprintf(stderr, "error: Corrupt symbol table\n"); + return 1; + } + + if (symap_map(map, syms[i]) != id) { + fprintf(stderr, "error: Remapped symbol to a different ID\n"); + return 1; + } + + symap_dump(map); + } + + symap_free(map); + return 0; +} + +#endif /* STANDALONE */ diff --git a/src/symap.h b/src/symap.h new file mode 100644 index 0000000..79de8ff --- /dev/null +++ b/src/symap.h @@ -0,0 +1,69 @@ +/* + Copyright 2011-2012 David Robillard <http://drobilla.net> + + Permission to use, copy, modify, and/or distribute this software for any + purpose with or without fee is hereby granted, provided that the above + copyright notice and this permission notice appear in all copies. + + THIS SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +*/ + +/** + @file symap.h API for Symap, a basic symbol map (string interner). + + Particularly useful for implementing LV2 URI mapping. + + @see <a href="http://lv2plug.in/ns/ext/urid">LV2 URID</a> + @see <a href="http://lv2plug.in/ns/ext/uri-map">LV2 URI Map</a> +*/ + +#ifndef SYMAP_H +#define SYMAP_H + +#include <stdint.h> + +struct SymapImpl; + +typedef struct SymapImpl Symap; + +/** + Create a new symbol map. +*/ +Symap* +symap_new(void); + +/** + Free a symbol map. +*/ +void +symap_free(Symap* map); + +/** + Map a string to a symbol ID if it is already mapped, otherwise return 0. +*/ +uint32_t +symap_try_map(Symap* map, const char* sym); + +/** + Map a string to a symbol ID. + + Note that 0 is never a valid symbol ID. +*/ +uint32_t +symap_map(Symap* map, const char* sym); + +/** + Unmap a symbol ID back to a symbol, or NULL if no such ID exists. + + Note that 0 is never a valid symbol ID. +*/ +const char* +symap_unmap(Symap* map, uint32_t id); + +#endif /* SYMAP_H */ diff --git a/src/worker.c b/src/worker.c new file mode 100644 index 0000000..074ecf2 --- /dev/null +++ b/src/worker.c @@ -0,0 +1,118 @@ +/* + Copyright 2007-2013 David Robillard <http://drobilla.net> + + Permission to use, copy, modify, and/or distribute this software for any + purpose with or without fee is hereby granted, provided that the above + copyright notice and this permission notice appear in all copies. + + THIS SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+*/ + +#include "worker.h" + +static LV2_Worker_Status +jalv_worker_respond(LV2_Worker_Respond_Handle handle, + uint32_t size, + const void* data) +{ + Jalv* jalv = (Jalv*)handle; + jack_ringbuffer_write(jalv->worker.responses, + (const char*)&size, sizeof(size)); + jack_ringbuffer_write(jalv->worker.responses, (const char*)data, size); + return LV2_WORKER_SUCCESS; +} + +static void* +worker_func(void* data) +{ + Jalv* jalv = (Jalv*)data; + void* buf = NULL; + while (true) { + zix_sem_wait(&jalv->worker.sem); + if (jalv->exit) { + break; + } + + uint32_t size = 0; + jack_ringbuffer_read(jalv->worker.requests, (char*)&size, sizeof(size)); + + if (!(buf = realloc(buf, size))) { + fprintf(stderr, "error: realloc() failed\n"); + free(buf); + return NULL; + } + + jack_ringbuffer_read(jalv->worker.requests, (char*)buf, size); + + jalv->worker.iface->work( + jalv->instance->lv2_handle, jalv_worker_respond, jalv, size, buf); + } + + free(buf); + return NULL; +} + +void +jalv_worker_init(Jalv* jalv, + JalvWorker* worker, + const LV2_Worker_Interface* iface) +{ + worker->iface = iface; + zix_thread_create(&worker->thread, 4096, worker_func, jalv); + worker->requests = jack_ringbuffer_create(4096); + worker->responses = jack_ringbuffer_create(4096); + worker->response = malloc(4096); + jack_ringbuffer_mlock(worker->requests); + jack_ringbuffer_mlock(worker->responses); +} + +void +jalv_worker_finish(JalvWorker* worker) +{ + if (worker->requests) { + zix_sem_post(&worker->sem); + zix_thread_join(worker->thread, NULL); + jack_ringbuffer_free(worker->requests); + jack_ringbuffer_free(worker->responses); + free(worker->response); + } +} + +LV2_Worker_Status +jalv_worker_schedule(LV2_Worker_Schedule_Handle handle, + uint32_t size, + const void* data) +{ + Jalv* jalv = (Jalv*)handle; + jack_ringbuffer_write(jalv->worker.requests, + (const char*)&size, sizeof(size)); + jack_ringbuffer_write(jalv->worker.requests, (const char*)data, size); + zix_sem_post(&jalv->worker.sem); + return LV2_WORKER_SUCCESS; +} + +void +jalv_worker_emit_responses(Jalv* jalv, JalvWorker* worker) +{ + if (worker->responses) { + uint32_t read_space = jack_ringbuffer_read_space(worker->responses); + while (read_space) { + uint32_t size = 0; + jack_ringbuffer_read(worker->responses, (char*)&size, sizeof(size)); + + jack_ringbuffer_read( + worker->responses, (char*)worker->response, size); + + worker->iface->work_response( + jalv->instance->lv2_handle, size, worker->response); + + read_space -= sizeof(size) + size; + } + } +} diff --git a/src/worker.h b/src/worker.h new file mode 100644 index 0000000..eecd7ec --- /dev/null +++ b/src/worker.h @@ -0,0 +1,35 @@ +/* + Copyright 2007-2013 David Robillard <http://drobilla.net> + + Permission to use, copy, modify, and/or distribute this software for any + purpose with or without fee is hereby granted, provided that the above + copyright notice and this permission notice appear in all copies. + + THIS SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+*/ + +#include "lv2/lv2plug.in/ns/ext/worker/worker.h" + +#include "LV2-render_internal.h" + +void +jalv_worker_init(Jalv* jalv, + JalvWorker* worker, + const LV2_Worker_Interface* iface); + +void +jalv_worker_finish(JalvWorker* worker); + +LV2_Worker_Status +jalv_worker_schedule(LV2_Worker_Schedule_Handle handle, + uint32_t size, + const void* data); + +void +jalv_worker_emit_responses(Jalv* jalv, JalvWorker* worker); diff --git a/src/zix/common.h b/src/zix/common.h new file mode 100644 index 0000000..59e1f55 --- /dev/null +++ b/src/zix/common.h @@ -0,0 +1,83 @@ +/* + Copyright 2011 David Robillard <http://drobilla.net> + + Permission to use, copy, modify, and/or distribute this software for any + purpose with or without fee is hereby granted, provided that the above + copyright notice and this permission notice appear in all copies. + + THIS SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +*/ + +#ifndef ZIX_COMMON_H +#define ZIX_COMMON_H + +/** + @addtogroup zix + @{ +*/ + +/** @cond */ +#ifdef ZIX_SHARED +# ifdef _WIN32 +# define ZIX_LIB_IMPORT __declspec(dllimport) +# define ZIX_LIB_EXPORT __declspec(dllexport) +# else +# define ZIX_LIB_IMPORT __attribute__((visibility("default"))) +# define ZIX_LIB_EXPORT __attribute__((visibility("default"))) +# endif +# ifdef ZIX_INTERNAL +# define ZIX_API ZIX_LIB_EXPORT +# else +# define ZIX_API ZIX_LIB_IMPORT +# endif +#else +# define ZIX_API +#endif +/** @endcond */ + +#ifdef __cplusplus +extern "C" { +#else +# include <stdbool.h> +#endif + +typedef enum { + ZIX_STATUS_SUCCESS, + ZIX_STATUS_ERROR, + ZIX_STATUS_NO_MEM, + ZIX_STATUS_NOT_FOUND, + ZIX_STATUS_EXISTS, + ZIX_STATUS_BAD_ARG, + ZIX_STATUS_BAD_PERMS, +} ZixStatus; + +/** + Function for comparing two elements. +*/ +typedef int (*ZixComparator)(const void* a, const void* b, void* user_data); + +/** + Function for testing equality of two elements. +*/ +typedef bool (*ZixEqualFunc)(const void* a, const void* b); + +/** + Function to destroy an element. +*/ +typedef void (*ZixDestroyFunc)(void* ptr); + +/** + @} +*/ + +#ifdef __cplusplus +} /* extern "C" */ +#endif + +#endif /* ZIX_COMMON_H */ diff --git a/src/zix/sem.h b/src/zix/sem.h new file mode 100644 index 0000000..6a6dc8e --- /dev/null +++ b/src/zix/sem.h @@ -0,0 +1,227 @@ +/* + Copyright 2012 David Robillard <http://drobilla.net> + + Permission to use, copy, modify, and/or distribute this software for any + purpose with or without fee is hereby granted, provided that the above + copyright notice and this permission notice appear in all copies. + + THIS SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+*/ + +#ifndef ZIX_SEM_H +#define ZIX_SEM_H + +#ifdef __APPLE__ +# include <mach/mach.h> +#elif defined(_WIN32) +# include <limits.h> +# include <windows.h> +#else +# include <semaphore.h> +#endif + +#include "zix/common.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/** + @addtogroup zix + @{ + @name Semaphore + @{ +*/ + +/** + A counting semaphore. + + This is an integer that is always positive, and has two main operations: + increment (post) and decrement (wait). If a decrement can not be performed + (i.e. the value is 0) the caller will be blocked until another thread posts + and the operation can succeed. + + Semaphores can be created with any starting value, but typically this will + be 0 so the semaphore can be used as a simple signal where each post + corresponds to one wait. + + Semaphores are very efficient (much moreso than a mutex/cond pair). In + particular, at least on Linux, post is async-signal-safe, which means it + does not block and will not be interrupted. If you need to signal from + a realtime thread, this is the most appropriate primitive to use. +*/ +typedef struct ZixSemImpl ZixSem; + +/** + Create and initialize `sem` to `initial`. +*/ +static inline ZixStatus +zix_sem_init(ZixSem* sem, unsigned initial); + +/** + Destroy `sem`. +*/ +static inline void +zix_sem_destroy(ZixSem* sem); + +/** + Increment (and signal any waiters). + Realtime safe. +*/ +static inline void +zix_sem_post(ZixSem* sem); + +/** + Wait until count is > 0, then decrement. + Obviously not realtime safe. +*/ +static inline void +zix_sem_wait(ZixSem* sem); + +/** + Non-blocking version of wait(). + + @return true if decrement was successful (lock was acquired). +*/ +static inline bool +zix_sem_try_wait(ZixSem* sem); + +/** + @cond +*/ + +#ifdef __APPLE__ + +struct ZixSemImpl { + semaphore_t sem; +}; + +static inline ZixStatus +zix_sem_init(ZixSem* sem, unsigned initial) +{ + return semaphore_create(mach_task_self(), &sem->sem, SYNC_POLICY_FIFO, 0) + ? ZIX_STATUS_ERROR : ZIX_STATUS_SUCCESS; +} + +static inline void +zix_sem_destroy(ZixSem* sem) +{ + semaphore_destroy(mach_task_self(), sem->sem); +} + +static inline void +zix_sem_post(ZixSem* sem) +{ + semaphore_signal(sem->sem); +} + +static inline void +zix_sem_wait(ZixSem* sem) +{ + semaphore_wait(sem->sem); +} + +static inline bool +zix_sem_try_wait(ZixSem* sem) +{ + const mach_timespec_t zero = { 0, 0 }; + return semaphore_timedwait(sem->sem, zero) == KERN_SUCCESS; +} + +#elif defined(_WIN32) + +struct ZixSemImpl { + HANDLE sem; +}; + +static inline ZixStatus +zix_sem_init(ZixSem* sem, unsigned initial) +{ + sem->sem = CreateSemaphore(NULL, initial, LONG_MAX, NULL); + return (sem->sem) ? ZIX_STATUS_ERROR : ZIX_STATUS_SUCCESS; +} + +static inline void +zix_sem_destroy(ZixSem* sem) +{ + CloseHandle(sem->sem); +} + +static inline void +zix_sem_post(ZixSem* sem) +{ + ReleaseSemaphore(sem->sem, 1, NULL); +} + +static inline void +zix_sem_wait(ZixSem* sem) +{ + WaitForSingleObject(sem->sem, INFINITE); +} + +static inline bool +zix_sem_try_wait(ZixSem* sem) +{ + WaitForSingleObject(sem->sem, 0); +} + +#else /* !defined(__APPLE__) && !defined(_WIN32) */ + +struct ZixSemImpl { + sem_t sem; +}; + +static inline ZixStatus +zix_sem_init(ZixSem* sem, unsigned initial) +{ + return sem_init(&sem->sem, 0, initial) + ? 
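    /* sem_init() returns 0 on success and -1 (with errno set) on failure, so the
       nonzero case maps to ZIX_STATUS_ERROR and zero falls through to SUCCESS */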
ZIX_STATUS_ERROR : ZIX_STATUS_SUCCESS; +} + +static inline void +zix_sem_destroy(ZixSem* sem) +{ + sem_destroy(&sem->sem); +} + +static inline void +zix_sem_post(ZixSem* sem) +{ + sem_post(&sem->sem); +} + +static inline void +zix_sem_wait(ZixSem* sem) +{ + /* Note that sem_wait always returns 0 in practice, except in + gdb (at least), where it returns nonzero, so the while is + necessary (and is the correct/safe solution in any case). + */ + while (sem_wait(&sem->sem) != 0) {} +} + +static inline bool +zix_sem_try_wait(ZixSem* sem) +{ + return (sem_trywait(&sem->sem) == 0); +} + +#endif + +/** + @endcond + @} + @} +*/ + +#ifdef __cplusplus +} /* extern "C" */ +#endif + +#endif /* ZIX_SEM_H */ diff --git a/src/zix/thread.h b/src/zix/thread.h new file mode 100644 index 0000000..b007efa --- /dev/null +++ b/src/zix/thread.h @@ -0,0 +1,133 @@ +/* + Copyright 2012-2014 David Robillard <http://drobilla.net> + + Permission to use, copy, modify, and/or distribute this software for any + purpose with or without fee is hereby granted, provided that the above + copyright notice and this permission notice appear in all copies. + + THIS SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +*/ + +#ifndef ZIX_THREAD_H +#define ZIX_THREAD_H + +#ifdef _WIN32 +# include <windows.h> +#else +# include <errno.h> +# include <pthread.h> +#endif + +#include "zix/common.h" + +#ifdef __cplusplus +extern "C" { +#else +# include <stdbool.h> +#endif + +/** + @addtogroup zix + @{ + @name Thread + @{ +*/ + +#ifdef _WIN32 +typedef HANDLE ZixThread; +#else +typedef pthread_t ZixThread; +#endif + +/** + Initialize `thread` to a new thread. + + The thread will immediately be launched, calling `function` with `arg` + as the only parameter. +*/ +static inline ZixStatus +zix_thread_create(ZixThread* thread, + size_t stack_size, + void* (*function)(void*), + void* arg); + +/** + Join `thread` (block until `thread` exits). +*/ +static inline ZixStatus +zix_thread_join(ZixThread thread, void** retval); + +#ifdef _WIN32 + +static inline ZixStatus +zix_thread_create(ZixThread* thread, + size_t stack_size, + void* (*function)(void*), + void* arg) +{ + *thread = CreateThread(NULL, stack_size, + (LPTHREAD_START_ROUTINE)function, arg, + 0, NULL); + return *thread ? ZIX_STATUS_SUCCESS : ZIX_STATUS_ERROR; +} + +static inline ZixStatus +zix_thread_join(ZixThread thread, void** retval) +{ + return WaitForSingleObject(thread, INFINITE) + ? 
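    /* note: WaitForSingleObject() returns WAIT_OBJECT_0, i.e. 0, once the thread
       has exited, so as written this expression yields ZIX_STATUS_ERROR on a
       successful join and ZIX_STATUS_SUCCESS only when the wait itself fails */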
ZIX_STATUS_SUCCESS : ZIX_STATUS_ERROR; +} + +#else /* !defined(_WIN32) */ + +static inline ZixStatus +zix_thread_create(ZixThread* thread, + size_t stack_size, + void* (*function)(void*), + void* arg) +{ + pthread_attr_t attr; + pthread_attr_init(&attr); + pthread_attr_setstacksize(&attr, stack_size); + + const int ret = pthread_create(thread, NULL, function, arg); + pthread_attr_destroy(&attr); + + if (ret == EAGAIN) { + return ZIX_STATUS_NO_MEM; + } else if (ret == EINVAL) { + return ZIX_STATUS_BAD_ARG; + } else if (ret == EPERM) { + return ZIX_STATUS_BAD_PERMS; + } else if (ret) { + return ZIX_STATUS_ERROR; + } + + return ZIX_STATUS_SUCCESS; +} + +static inline ZixStatus +zix_thread_join(ZixThread thread, void** retval) +{ + return pthread_join(thread, retval) + ? ZIX_STATUS_ERROR : ZIX_STATUS_SUCCESS; +} + +#endif + +/** + @} + @} +*/ + +#ifdef __cplusplus +} /* extern "C" */ +#endif + +#endif /* ZIX_THREAD_H */ diff --git a/test/example.mid b/test/example.mid Binary files differnew file mode 100644 index 0000000..bf3bfc0 --- /dev/null +++ b/test/example.mid diff --git a/test/short_example.mid b/test/short_example.mid Binary files differBinary files differnew file mode 100644 index 0000000..a790260 --- /dev/null +++ b/test/short_example.mid @@ -0,0 +1,103 @@ +#!/usr/bin/env python +import subprocess +from waflib.extras import autowaf as autowaf +import waflib.Options as Options + +# Version of this package (even if built as a child) +#this one? loo ks so +JALV_VERSION = '1.4.6' + +# Variables for 'waf dist' +APPNAME = 'LV2-render' +VERSION = JALV_VERSION + +# Mandatory variables +top = '.' +out = 'build' + +def options(opt): + opt.load('compiler_c') + opt.load('compiler_cxx') + autowaf.set_options(opt) + opt.add_option('--no-jack-session', action='store_true', default=False, + dest='no_jack_session', + help="Do not build JACK session support") + opt.add_option('--no-qt', action='store_true', default=False, + dest='no_qt', + help="Do not build Qt GUI") + +def configure(conf): + conf.line_just = 52 + conf.load('compiler_c') + conf.load('compiler_cxx') + conf.env.append_unique('LIB', 'm') # should be it, not sure so this is some python-based autotools thing, I guess? yeah + conf.env.append_unique('LIB', 'sndfile') # should be it, not sure so this is some python-based autotools thing, I guess? 
yeah + autowaf.configure(conf) + autowaf.set_c99_mode(conf) + autowaf.display_header('Jalv Configuration') + + autowaf.check_pkg(conf, 'lv2', atleast_version='1.8.1', uselib_store='LV2') + autowaf.check_pkg(conf, 'lilv-0', uselib_store='LILV', + atleast_version='0.19.2', mandatory=True) + autowaf.check_pkg(conf, 'serd-0', uselib_store='SERD', + atleast_version='0.14.0', mandatory=True) + autowaf.check_pkg(conf, 'sord-0', uselib_store='SORD', + atleast_version='0.12.0', mandatory=True) + autowaf.check_pkg(conf, 'suil-0', uselib_store='SUIL', + atleast_version='0.6.0', mandatory=True) + autowaf.check_pkg(conf, 'sratom-0', uselib_store='SRATOM', + atleast_version='0.4.0', mandatory=True) + autowaf.check_pkg(conf, 'jack', uselib_store='JACK', + atleast_version='0.120.0', mandatory=True) + + conf.check(function_name='jack_port_type_get_buffer_size', + header_name='jack/jack.h', + define_name='HAVE_JACK_PORT_TYPE_GET_BUFFER_SIZE', + uselib='JACK', + mandatory=False) + + conf.check(function_name='jack_set_property', + header_name='jack/metadata.h', + define_name='HAVE_JACK_METADATA', + uselib='JACK', + mandatory=False) + + if not Options.options.no_jack_session: + autowaf.define(conf, 'JALV_JACK_SESSION', 1) + + autowaf.define(conf, 'JALV_VERSION', JALV_VERSION) + + conf.write_config_header('jalv_config.h', remove=False) + + autowaf.display_msg(conf, "Jack metadata support", + conf.is_defined('HAVE_JACK_METADATA')) + print('') + +def build(bld): + libs = 'LILV SUIL JACK SERD SORD SRATOM LV2' + + source = 'src/LV2-render.c src/symap.c src/state.c src/lv2_evbuf.c src/worker.c src/log.c' + source += ' src/midi/midi_loader.c src/midi/fluid_midi.c src/midi/fluid_list.c' + + obj = bld(features = 'c cprogram', + source = source + ' src/LV2-render_console.c', + target = 'LV2-render', + includes = ['.', 'src', 'midi'], + lib = ['pthread'], + install_path = '${BINDIR}') + autowaf.use_lib(bld, obj, libs) + + + # Man pages + bld.install_files('${MANDIR}/man1', bld.path.ant_glob('doc/*.1')) + +def upload_docs(ctx): + import glob + import os + for page in glob.glob('doc/*.[1-8]'): + os.system('mkdir -p build/doc') + os.system('soelim %s | pre-grohtml troff -man -wall -Thtml | post-grohtml > build/%s.html' % (page, page)) + os.system('rsync -avz --delete -e ssh build/%s.html drobilla@drobilla.net:~/drobilla.net/man/' % page) + +def lint(ctx): + subprocess.call('cpplint.py --filter=+whitespace/comments,-whitespace/tab,-whitespace/braces,-whitespace/labels,-build/header_guard,-readability/casting,-readability/todo,-build/include,-runtime/sizeof src/* jalv/*', shell=True) |
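For readers who want to exercise the MIDI loader above, a minimal driver could look
roughly like the sketch below. This is the editor's illustration, not code from the
commit: "midi_loader.h" is an assumed header name, and the callback signature
void (*)(event_table_t *, void *) is inferred from the call site in get_events();
adjust both to match the real declarations.

/* Editor's sketch: feed a MIDI file through load_midi_file() and print the frame
   bookkeeping the loader maintains for each delivered event. */
#include <stdio.h>
#include "midi_loader.h"   /* assumed header for event_table_t, load_midi_file() */

static void count_events(event_table_t *table, void *userdata)
{
    unsigned long *count = (unsigned long *)userdata;
    ++*count;
    printf("event %lu: table length %u, frame %u (+%u frames since last event)\n",
           *count,
           (unsigned)table->length,
           (unsigned)table->last_nframe,
           (unsigned)table->nframes_since_last);
}

int main(int argc, char **argv)
{
    unsigned long count = 0;
    if (argc < 2) {
        fprintf(stderr, "usage: %s file.mid\n", argv[0]);
        return 1;
    }
    load_midi_file(argv[1], count_events, &count);
    printf("%lu events delivered\n", count);
    return 0;
}

Note the hard-coded 44100 in get_events() (marked FIXME in the source): until that
is replaced with the context's sample rate, the frame numbers printed here are only
meaningful at 44.1 kHz.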
