Python config.settings() Examples

The following are 30 code examples of config.settings(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions and classes of the module `config`, or try the search function.
Example #1
Source File: codeinjector.py    From fimap with GNU General Public License v2.0 6 votes vote down vote up
def executeRFI(self, URL, postdata, appendix, content, header):
        """Stages an RFI payload (FTP upload or local file), triggers the
        request, cleans the payload up again, and returns the response.

        The staging backend is chosen by settings["dynamic_rfi"]["mode"]
        ("ftp" or "local"); other modes return None implicitly.
        """
        content = self.payload_encode(content)

        # A nullbyte appendix only matters inside the crafted URL, not in
        # the uploaded file name.
        if (appendix == "%00"): appendix = ""
        if settings["dynamic_rfi"]["mode"] == "ftp":
            up = self.FTPuploadFile(content, appendix)
            # Pass headers by keyword, consistent with the "local" branch
            # below (the original passed them positionally here only).
            code = self.doPostRequest(URL, postdata, additionalHeaders=header)
            if up["dirstruct"]:
                self.FTPdeleteDirectory(up["ftp"])
            else:
                self.FTPdeleteFile(up["ftp"])
            return(code)
        elif settings["dynamic_rfi"]["mode"] == "local":
            up = self.putLocalPayload(content, appendix)
            code = self.doPostRequest(URL, postdata, additionalHeaders=header)
            self.deleteLocalPayload(up["local"])
            return(code)
Example #2
Source File: model.py    From luci-py with Apache License 2.0 6 votes vote down vote up
def entry_key_from_id(key_id):
  """Builds the ndb.Key corresponding to a content entry id."""
  namespace, hash_key = key_id.rsplit('/', 1)
  # https://crbug.com/944896
  sharding_letters = config.settings().sharding_letters
  assert sharding_letters in (1, 4), sharding_letters
  if namespace != 'default-gzip':
    # Work around https://crbug.com/943571, where prod instances have
    # sharding_letters: 1. Oops.
    #
    # This is a temporary hack until we migrate off default-gzip and
    # deprecate sharding_letters.
    sharding_letters = 4
  shard = datastore_utils.shard_key(hash_key, sharding_letters, 'ContentShard')
  return ndb.Key(ContentEntry, key_id, parent=shard)
Example #3
Source File: baseClass.py    From fimap with GNU General Public License v2.0 6 votes vote down vote up
def putLocalPayload(self, content, append):
        """Writes the payload into the locally mapped web root.

        Returns a dict:
          'local': filesystem path to remove at cleanup time (the top-level
                   directory when `append` introduced a sub directory).
          'http':  HTTP URL prefix the payload is served under.
        """
        target = settings["dynamic_rfi"]["local"]["local_path"] + append
        dirname = os.path.dirname(target)
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        up = {}

        up["local"] = settings["dynamic_rfi"]["local"]["local_path"]
        # When the appendix contains a sub directory, remember its top-level
        # directory so the whole tree can be deleted later.
        if append.find("/") != -1 and not append.startswith("/"):
            up["local"] = settings["dynamic_rfi"]["local"]["local_path"] + append[:append.find("/")]
        up["http"] = settings["dynamic_rfi"]["local"]["http_map"]
        # Context manager guarantees the handle is closed even if the write
        # fails (the original leaked the file object on error).
        with open(target, "w") as f:
            f.write(content)

        return(up)
Example #4
Source File: model.py    From luci-py with Apache License 2.0 6 votes vote down vote up
def delete_entry_and_gs_entry_async(key):
  """Deletes synchronously a ContentEntry and its GS file.

  It deletes the ContentEntry first, then the file in GS. The worst case is that
  the GS file is left behind and will be reaped by a lost GS task queue. The
  reverse is much worse, having a ContentEntry pointing to a deleted GS entry
  will lead to lookup failures.

  NOTE(review): the yield/ndb.Return structure suggests this body runs as an
  ndb tasklet (asynchronously, as the _async suffix implies) -- confirm
  whether the "synchronously" wording above is accurate.
  """
  bucket = config.settings().gs_bucket
  # Note that some content entries may NOT have corresponding GS files. That
  # happens for small entry stored inline in the datastore. Since this function
  # operates only on keys, it can't distinguish "large" entries stored in GS
  # from "small" ones stored inline. So instead it always tries to delete the
  # corresponding GS files, silently skipping ones that are not there.
  # Always delete ContentEntry first.
  name = key.string_id()
  yield key.delete_async()
  # This is synchronous.
  yield gcs.delete_file_async(bucket, name, ignore_missing=True)
  raise ndb.Return(None)
Example #5
Source File: baseClass.py    From fimap with GNU General Public License v2.0 6 votes vote down vote up
def FTPdeleteDirectory(self, directory, ftp = None):
        """Recursively deletes `directory` and everything inside it from the
        FTP server configured in settings["dynamic_rfi"]["ftp"].

        `ftp` is the already-open connection reused by recursive calls; a
        fresh connection is opened when it is None.
        """
        host = settings["dynamic_rfi"]["ftp"]["ftp_host"]
        user = settings["dynamic_rfi"]["ftp"]["ftp_user"]
        pw   = settings["dynamic_rfi"]["ftp"]["ftp_pass"]
        if ftp is None:
            self._log("Deleting directory recursivly from FTP server '%s'..."%(host), self.LOG_DEBUG)
            ftp = FTP(host, user, pw)

        ftp.cwd(directory)
        for entry in ftp.nlst(directory):
            try:
                ftp.delete(entry)
            except Exception:
                # DELE fails on sub directories; recurse into them instead.
                # Narrowed from a bare `except:` which also swallowed
                # KeyboardInterrupt/SystemExit.
                self.FTPdeleteDirectory(entry, ftp)

        ftp.cwd(directory)
        ftp.rmd(directory)
Example #6
Source File: metrics.py    From luci-py with Apache License 2.0 6 votes vote down vote up
def file_size(size):
  """Reports the size of a file fetched from GCS by whitelisted clients.

  If the client's requests are not whitelisted for monitoring, does nothing.

  Args:
    size: Size of the file in bytes.
  """
  peer_ip = auth.get_peer_ip()
  for cfg in config.settings().client_monitoring_config:
    if not auth.is_in_ip_whitelist(cfg.ip_whitelist, peer_ip):
      continue
    # Only the first matching whitelist entry is reported.
    metric_fields = {
        'client_name': cfg.label,
        'client_email': auth.get_peer_identity().to_bytes(),
        'download_source': 'GCS'
    }
    _bytes_requested.increment_by(size, fields=metric_fields)
    return
Example #7
Source File: handlers_frontend.py    From luci-py with Apache License 2.0 6 votes vote down vote up
def post(self):
    """Applies the posted form values to the stored settings config."""
    # Flatten the MultiDict into a plain dict, dropping bookkeeping keys.
    skipped = ('keyid', 'xsrf_token')
    params = {}
    for name in self.request.params:
      if name in skipped:
        continue
      params[name] = self.cast_to_type(name, self.request.params.getone(name))
    cfg = config.settings(fresh=True)
    keyid = int(self.request.get('keyid', '0'))
    if cfg.key.integer_id() != keyid:
      self.common('Update conflict %s != %s' % (cfg.key.integer_id(), keyid))
      return
    cfg.populate(**params)
    try:
      # Ensure key is correct, it's easy to make a mistake when creating it.
      gcs.URLSigner.load_private_key(cfg.gs_private_key)
    except Exception as exc:
      # TODO(maruel): Handling Exception is too generic. And add self.abort(400)
      self.response.write('Bad private key: %s' % exc)
      return
    cfg.store(updated_by=auth.get_current_identity().to_bytes())
    self.common('Settings updated')
Example #8
Source File: handlers_backend_test.py    From luci-py with Apache License 2.0 6 votes vote down vote up
def setUp(self):
    """Creates a new app instance for every test case."""
    super(MainTest, self).setUp()
    # Stubs required because the handlers under test touch blobstore and
    # urlfetch.
    self.testbed.init_blobstore_stub()
    self.testbed.init_urlfetch_stub()
    # Run every request as an admin who is a member of the full access group.
    admin = auth.Identity(auth.IDENTITY_USER, 'admin@example.com')
    full_access_group = config.settings().auth.full_access_group
    auth.bootstrap_group(full_access_group, [admin])
    auth_testing.mock_get_current_identity(self, admin)
    version = utils.get_app_version()
    # Route task queue work to this app version.
    self.mock(utils, 'get_task_queue_host', lambda: version)
    self.testbed.setup_env(current_version_id='testbed.version')
    self.source_ip = '127.0.0.1'
    self.app = webtest.TestApp(
        handlers_backend.create_application(debug=True),
        extra_environ={'REMOTE_ADDR': self.source_ip})
    # add a private key; signing depends on config.settings()
    make_private_key()
    # Remove the check for dev server in should_push_to_gs().
    self.mock(utils, 'is_local_dev_server', lambda: False)
Example #9
Source File: main_backend.py    From luci-py with Apache License 2.0 6 votes vote down vote up
def create_application():
  """Builds the backend WSGI app with ts_mon instrumentation attached."""
  ereporter2.register_formatter()

  # Zap out the ndb in-process cache by default. This cache causes excessive
  # memory usage in handlers where a lot of entities are fetched in one query.
  # Coupled with high concurrency (max_concurrent_requests in app.yaml), this
  # may cause out of memory errors.
  no_cache_policy = staticmethod(lambda _key: False)
  ndb.Context.default_cache_policy = no_cache_policy
  ndb.Context._cache_policy = no_cache_policy

  app = handlers_backend.create_application(False)
  gae_ts_mon.initialize(
      app, is_enabled_fn=lambda: config.settings().enable_ts_monitoring)
  return app
Example #10
Source File: handlers_frontend_test.py    From luci-py with Apache License 2.0 5 votes vote down vote up
def test_config_conflict(self):
    """A stale keyid must make the config POST fail without storing."""
    self.set_as_admin()
    resp = self.app.get('/restricted/config')
    # TODO(maruel): Use beautifulsoup?
    params = {
      'google_analytics': 'foobar',
      # Off-by-one keyid triggers the update-conflict path in the handler.
      'keyid': str(config.settings().key.integer_id() - 1),
      'reusable_task_age_secs': 30,
      'xsrf_token': self.get_xsrf_token(),
    }
    self.assertEqual('', config.settings().google_analytics)
    resp = self.app.post('/restricted/config', params)
    self.assertIn('Update conflict', resp)
    # The setting must be left unchanged after the rejected update.
    self.assertEqual('', config.settings().google_analytics)
Example #11
Source File: handlers_frontend.py    From luci-py with Apache License 2.0 5 votes vote down vote up
def get(self):
    """Serves the public isolate landing page."""
    template_params = {
      'client_id': config.settings().ui_client_id,
    }
    # Cacheable for one week: the only dynamic value in this template is the
    # oauth client id, which changes very infrequently.
    cache = self.response.cache_control
    cache.no_cache = None
    cache.public = True
    cache.max_age = 604800
    try:
      page = template.render(
          'isolate/public_isolate_index.html', template_params)
    except template.TemplateNotFound:
      self.abort(404, 'Page not found.')
    else:
      self.response.write(page)
Example #12
Source File: config_test.py    From luci-py with Apache License 2.0 5 votes vote down vote up
def test_validate_defaults_cached(self):
    # The default (cached) settings config must validate with no errors.
    self.validator_test(config.settings()._cfg, [])
Example #13
Source File: config_test.py    From luci-py with Apache License 2.0 5 votes vote down vote up
def test_validate_default_expiration(self):
    """A negative default_expiration must be flagged by the validator."""
    cfg = config.settings()._cfg
    cfg.default_expiration = -1
    self.validator_test(cfg, ['default_expiration cannot be negative'])
Example #14
Source File: config_test.py    From luci-py with Apache License 2.0 5 votes vote down vote up
def test_validate_sharding_letters(self):
    """Values outside [0..5] for sharding_letters must be rejected."""
    cfg = config.settings()._cfg
    # Probe one value just below and one just above the accepted range.
    for bad_value in (-1, 6):
      cfg.sharding_letters = bad_value
      self.validator_test(cfg, ['sharding_letters must be within [0..5]'])
Example #15
Source File: config_test.py    From luci-py with Apache License 2.0 5 votes vote down vote up
def test_validate_gs_bucket(self):
    """A bucket name with invalid characters must be rejected."""
    cfg = config.settings()._cfg
    cfg.gs_bucket = 'b@d_b1cket'
    self.validator_test(cfg, ['gs_bucket invalid value: b@d_b1cket'])
Example #16
Source File: config_test.py    From luci-py with Apache License 2.0 5 votes vote down vote up
def test_validate_gs_client_id_email(self):
    """gs_client_id_email must look like an email address to validate."""
    cfg = config.settings()._cfg
    cfg.gs_client_id_email = 'not.an.email'
    self.validator_test(cfg, ['gs_client_id_email invalid value: not.an.email'])

    # A well-formed address passes with no validation errors.
    cfg.gs_client_id_email = 'valid@email.net'
    self.validator_test(cfg, [])
Example #17
Source File: config_test.py    From luci-py with Apache License 2.0 5 votes vote down vote up
def test_gitiles_url_none(self):
    # Without a base URL there is no gitiles URL to derive.
    self.assertIsNone(config._gitiles_url(None, 'testrev', 'settings.cfg'))
Example #18
Source File: workers_requirements.py    From bt-mqtt-gateway with MIT License 5 votes vote down vote up
def configured_workers():
    """Returns the requirements for the workers enabled in the config."""
    from config import settings

    enabled_workers = settings['manager']['workers']
    return _get_requirements(enabled_workers)
Example #19
Source File: baseClass.py    From fimap with GNU General Public License v2.0 5 votes vote down vote up
def FTPdeleteFile(self, file):
        """Deletes a single payload file from the configured FTP server.

        Note: the parameter name `file` shadows the builtin; it is kept
        unchanged to preserve the public signature for keyword callers.
        """
        host = settings["dynamic_rfi"]["ftp"]["ftp_host"]
        user = settings["dynamic_rfi"]["ftp"]["ftp_user"]
        pw   = settings["dynamic_rfi"]["ftp"]["ftp_pass"]
        self._log("Deleting payload (%s) from FTP server '%s'..."%(file, host), self.LOG_DEBUG)
        ftp = FTP(host, user, pw)
        try:
            ftp.delete(file)
        finally:
            # Always close the control connection, even when DELE fails,
            # to avoid leaking the socket (the original skipped quit() on
            # error).
            ftp.quit()
Example #20
Source File: handlers_frontend_test.py    From luci-py with Apache License 2.0 5 votes vote down vote up
def test_config(self):
    """Posting with the live keyid stores the new private key."""
    self.set_as_admin()
    resp = self.app.get('/restricted/config')
    # TODO(maruel): Use beautifulsoup?
    priv_key = 'test private key'
    params = {
      'gs_private_key': priv_key,
      # Use the current keyid so the update is not treated as a conflict.
      'keyid': str(config.settings_info()['cfg'].key.integer_id()),
      'xsrf_token': self.get_xsrf_token(),
    }
    self.assertEqual('', config.settings().gs_private_key)
    resp = self.app.post('/restricted/config', params)
    self.assertNotIn('Update conflict', resp)
    # The posted key must now be visible through config.settings().
    self.assertEqual(priv_key, config.settings().gs_private_key)
Example #21
Source File: handlers_frontend_test.py    From luci-py with Apache License 2.0 5 votes vote down vote up
def setUp(self):
    """Creates a new app instance for every test case."""
    super(MainTest, self).setUp()
    self.testbed.init_user_stub()

    self.source_ip = '192.168.0.1'
    self.app = webtest.TestApp(
        handlers_frontend.create_application(debug=True),
        extra_environ={'REMOTE_ADDR': self.source_ip})

    # Separate test app for the auth endpoints, using the same peer address.
    self.auth_app = webtest.TestApp(
        auth.create_wsgi_application(debug=True),
        extra_environ={
          'REMOTE_ADDR': self.source_ip,
          'SERVER_SOFTWARE': os.environ['SERVER_SOFTWARE'],
        })

    full_access_group = config.settings().auth.full_access_group
    readonly_access_group = config.settings().auth.readonly_access_group

    # One identity per access level: admin, read-only reader, and writer.
    auth.bootstrap_group(
        auth.ADMIN_GROUP,
        [auth.Identity(auth.IDENTITY_USER, 'admin@example.com')])
    auth.bootstrap_group(
        readonly_access_group,
        [auth.Identity(auth.IDENTITY_USER, 'reader@example.com')])
    auth.bootstrap_group(
        full_access_group,
        [auth.Identity(auth.IDENTITY_USER, 'writer@example.com')])
    # Tests opt into a specific identity explicitly; start out anonymous.
    self.set_as_anonymous()
Example #22
Source File: handlers_backend.py    From luci-py with Apache License 2.0 5 votes vote down vote up
def post(self):
    """Enumerates all GS files and deletes those that do not have an
    associated ContentEntry.
    """
    bucket = config.settings().gs_bucket
    logging.debug('Operating on GCS bucket: %s', bucket)
    orphans = _yield_orphan_gcs_files(bucket)
    deleted = _incremental_delete(
        orphans, lambda name: gcs.delete_file_async(bucket, name, True))
    logging.info('Deleted %d lost GS files', deleted)
    # TODO(maruel): Find all the empty directories that are old and remove them.
    # We need to safe guard against the race condition where a user would upload
    # to this directory.
Example #23
Source File: handlers_backend_test.py    From luci-py with Apache License 2.0 5 votes vote down vote up
def test_cron_cleanup_trigger_expired(self):
    # Asserts that old entities are deleted through a task queue.

    # Removes the jitter.
    def _expiration_jitter(now, expiration):
      out = now + datetime.timedelta(seconds=expiration)
      return out, out
    self.mock(model, 'expiration_jitter', _expiration_jitter)
    now = self.mock_now(datetime.datetime(2020, 1, 2, 3, 4, 5), 0)
    request = self.store_request('sha1-raw', 'Foo')
    self.call_api('store_inline', message_to_dict(request))
    self.assertEqual(1, model.ContentEntry.query().count())

    # Exactly at the expiration boundary: nothing is enqueued yet.
    self.mock_now(now, config.settings().default_expiration)
    self.app.get(
        '/internal/cron/cleanup/trigger/expired',
        headers={'X-AppEngine-Cron': 'true'})
    self.assertEqual(1, model.ContentEntry.query().count())
    self.assertEqual(0, self.execute_tasks())

    # Try again, second later.
    self.mock_now(now, config.settings().default_expiration+1)
    self.app.get(
        '/internal/cron/cleanup/trigger/expired',
        headers={'X-AppEngine-Cron': 'true'})
    # The entity survives until the deletion tasks actually execute.
    self.assertEqual(1, model.ContentEntry.query().count())

    # The query task queue triggers deletion task queues.
    self.assertEqual(2, self.execute_tasks())
    # Boom it's gone.
    self.assertEqual(0, model.ContentEntry.query().count())
Example #24
Source File: handlers_backend_test.py    From luci-py with Apache License 2.0 5 votes vote down vote up
def make_private_key():
  """Installs a freshly generated RSA key into the cached settings."""
  generated = RSA.generate(1024)
  encoded = base64.b64encode(generated.exportKey('PEM'))
  config.settings()._ds_cfg.gs_private_key = encoded
Example #25
Source File: acl.py    From luci-py with Apache License 2.0 5 votes vote down vote up
def isolate_readable():
  """Returns True if current user can read from isolate."""
  readers_group = config.settings().auth.readonly_access_group
  if auth.is_group_member(readers_group):
    return True
  # Writers implicitly have read access too.
  return isolate_writable()
Example #26
Source File: acl.py    From luci-py with Apache License 2.0 5 votes vote down vote up
def isolate_writable():
  """Returns True if current user can write to isolate."""
  writers_group = config.settings().auth.full_access_group
  if auth.is_group_member(writers_group):
    return True
  # Admins can always write.
  return auth.is_admin()
Example #27
Source File: handlers_endpoints_v1_test.py    From luci-py with Apache License 2.0 5 votes vote down vote up
def setUp(self):
    """Initializes stubs, identities and the backend test app."""
    super(IsolateServiceTest, self).setUp()
    self.testbed.init_blobstore_stub()
    self.testbed.init_urlfetch_stub()
    # It seems like there is a singleton state preserved across the tests,
    # making it hard to re-run the complete setUp procedure. Therefore we pre-
    # register all the possible identities being used in the tests.
    all_authed_ids = [
        auth.Identity(auth.IDENTITY_USER, 'admin@example.com'),
        auth.Identity(auth.IDENTITY_USER, 'admin@appspot.gserviceaccount.com'),
        auth.Identity(auth.IDENTITY_SERVICE, 'adminapp'),
    ]
    admin = all_authed_ids[0]
    full_access_group = config.settings().auth.full_access_group
    auth.bootstrap_group(full_access_group, all_authed_ids)
    auth_testing.mock_get_current_identity(self, admin)
    version = utils.get_app_version()
    # Route task queue work to this app version.
    self.mock(utils, 'get_task_queue_host', lambda: version)
    self.testbed.setup_env(current_version_id='testbed.version')
    self.source_ip = '127.0.0.1'
    # It is needed solely for self.execute_tasks(), which processes tasks queues
    # on the backend application.
    self.app = webtest.TestApp(
        handlers_backend.create_application(debug=True),
        extra_environ={'REMOTE_ADDR': self.source_ip})
    # add a private key; signing depends on config.settings()
    make_private_key()
    # Remove the check for dev server in should_push_to_gs().
    self.mock(utils, 'is_local_dev_server', lambda: False)
Example #28
Source File: handlers_endpoints_v1_test.py    From luci-py with Apache License 2.0 5 votes vote down vote up
def make_private_key():
  """Generates an RSA key and stores it in the cached settings config."""
  pem = RSA.generate(1024).exportKey('PEM')
  config.settings()._ds_cfg.gs_private_key = base64.b64encode(pem)
Example #29
Source File: main_frontend.py    From luci-py with Apache License 2.0 5 votes vote down vote up
def create_application():
  """Builds the frontend, endpoints-API and pRPC WSGI applications."""
  ereporter2.register_formatter()

  # Zap out the ndb in-process cache by default. This cache causes excessive
  # memory usage in handlers where a lot of entities are fetched in one query.
  # Coupled with high concurrency (max_concurrent_requests in app.yaml), this
  # may cause out of memory errors.
  no_cache_policy = staticmethod(lambda _key: False)
  ndb.Context.default_cache_policy = no_cache_policy
  ndb.Context._cache_policy = no_cache_policy

  # App that serves HTML pages and old API.
  frontend_app = handlers_frontend.create_application(False)
  gae_ts_mon.initialize(
      frontend_app,
      is_enabled_fn=lambda: config.settings().enable_ts_monitoring)

  # App that serves new endpoints API.
  api_app = endpoints_webapp2.api_server([
      handlers_endpoints_v1.IsolateService,
      # components.config endpoints for validation and configuring of
      # luci-config service URL.
      config.ConfigApi,
  ])
  gae_ts_mon.instrument_wsgi_application(api_app)

  prpc_app = webapp2.WSGIApplication(handlers_prpc.get_routes())
  return frontend_app, api_app, prpc_app
Example #30
Source File: model.py    From luci-py with Apache License 2.0 5 votes vote down vote up
def new_content_entry(key, **kwargs):
  """Generates a new ContentEntry for the request.

  Doesn't store it. Just creates a new ContentEntry instance.
  """
  lifetime = config.settings().default_expiration
  expiration_ts, next_tag_ts = expiration_jitter(utils.utcnow(), lifetime)
  return ContentEntry(
      key=key, expiration_ts=expiration_ts, next_tag_ts=next_tag_ts, **kwargs)