snscrape commit e6aae353
setup.py
skipped 2 lines
 
 setuptools.setup(
     name = 'snscrape',
-    version = '0.2.0',
     description = 'A social networking service scraper',
     author = 'JustAnotherArchivist',
     url = 'https://github.com/JustAnotherArchivist/snscrape',
skipped 3 lines
         'Programming Language :: Python :: 3.6',
     ],
     packages = ['snscrape', 'snscrape.modules'],
+    setup_requires = ['setuptools_scm'],
+    use_scm_version = True,
     install_requires = ['requests[socks]', 'lxml', 'beautifulsoup4'],
     entry_points = {
         'console_scripts': [
skipped 5 lines
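
Note: with setup_requires = ['setuptools_scm'] and use_scm_version = True, setuptools derives the package version from the repository's git tags at build time, which is why the hard-coded version = '0.2.0' line is dropped. A minimal sketch of that lookup (assuming setuptools_scm is installed and the checkout is tagged, e.g. v0.2.0):

    import setuptools_scm

    # For a tagged commit this returns the tag (e.g. '0.2.0'); for later
    # commits it returns a development version such as '0.2.1.dev3+g1234abc'.
    print(setuptools_scm.get_version(root='.', relative_to=__file__))
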
snscrape/cli.py
skipped 6 lines
 # Imported in parse_args() after setting up the logger:
 #import snscrape.base
 #import snscrape.modules
+#import snscrape.version
 import tempfile
 
 
skipped 135 lines
 
 
 def parse_args():
+    import snscrape.base
+    import snscrape.modules
+    import snscrape.version
+
     parser = argparse.ArgumentParser(formatter_class = argparse.ArgumentDefaultsHelpFormatter)
+    parser.add_argument('--version', action = 'version', version = f'snscrape {snscrape.version.__version__}')
     parser.add_argument('-v', '--verbose', '--verbosity', dest = 'verbosity', action = 'count', default = 0, help = 'Increase output verbosity')
     parser.add_argument('--dump-locals', dest = 'dumpLocals', action = 'store_true', default = False, help = 'Dump local variables on serious log messages (warnings or higher)')
     parser.add_argument('--retry', '--retries', dest = 'retries', type = int, default = 3, metavar = 'N',
skipped 3 lines
     parser.add_argument('--since', type = parse_datetime_arg, metavar = 'DATETIME', help = 'Only return results newer than DATETIME')
 
     subparsers = parser.add_subparsers(dest = 'scraper', help = 'The scraper you want to use')
-    import snscrape.base
-    import snscrape.modules
     classes = snscrape.base.Scraper.__subclasses__()
     for cls in classes:
         if cls.name is not None:
skipped 68 lines
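
Note: the new --version flag uses argparse's built-in 'version' action, which prints the given string and exits; snscrape.base, snscrape.modules and snscrape.version are still imported inside parse_args(), matching the comment at the top of the file ("Imported in parse_args() after setting up the logger"). A standalone sketch of that action (hypothetical version string, not the snscrape code itself):

    import argparse

    parser = argparse.ArgumentParser(prog = 'snscrape')
    # action = 'version' prints the message to stdout and exits with status 0.
    parser.add_argument('--version', action = 'version', version = 'snscrape 0.2.0')
    parser.parse_args(['--version'])  # prints 'snscrape 0.2.0' and exits
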
snscrape/version.py
+import pkg_resources
+
+
+try:
+    __version__ = pkg_resources.get_distribution('snscrape').version
+except pkg_resources.DistributionNotFound:
+    __version__ = None
+
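
Note: snscrape/version.py asks pkg_resources (part of setuptools) for the installed distribution's version and falls back to None when snscrape is not installed, so the --version output degrades gracefully in a bare source checkout. A small usage sketch, assuming snscrape is importable:

    import snscrape.version

    if snscrape.version.__version__ is not None:
        print(f'snscrape {snscrape.version.__version__}')
    else:
        # DistributionNotFound was raised above: running from an
        # uninstalled source tree, so no version metadata is available.
        print('snscrape (version unknown)')
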