Python inspect.stack() Examples

The following are 30 code examples of inspect.stack(), taken from open-source projects. The source file, project, and license are noted above each example. inspect.stack() returns the current call stack as a list of frame records; the examples below use it mainly to read the name of the currently executing function or to inspect a caller's frame. You may also want to check out all other available functions and classes of the inspect module.
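As a quick orientation before the project examples, here is a minimal sketch (not taken from any of the projects below): inspect.stack() returns a list of FrameInfo records, where index 0 is the frame of the current function and index 1 is its caller; the records also support tuple indexing, and field 3 is the function name, which is why the idiom inspect.stack()[0][3] appears so often on this page.

import inspect

def current_function_name():
    # stack()[0] is the frame record of this function; field 3 is its name.
    return inspect.stack()[0][3]

def caller_function_name():
    # stack()[1] is the frame record of whoever called this function.
    return inspect.stack()[1].function

def demo():
    print(current_function_name())  # prints 'current_function_name'
    print(caller_function_name())   # prints 'demo'

demo()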
Example #1
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def create_resource_stack(cls, resource_stack_name):
        try:
            cls.logger.test("Creating test resources stack {}", resource_stack_name)
            resource_stack = Stack(resource_stack_name, region=region())
            default_vpc_id = cls.ec2.get_default_vpc()
            assert default_vpc_id is not None
            resource_stack.create_stack(template_file=template_path(__file__, TEST_RESOURCES_TEMPLATE),
                                        timeout=1200,
                                        iam_capability=True,
                                        params={
                                            "VpcId": default_vpc_id["VpcId"],
                                        })
            return resource_stack
        except Exception as ex:
            cls.logger.test("Error creating stack {}, {}", resource_stack_name, ex)
            return None 
Example #2
Source File: MoveGenerator.py    From fullrmc with GNU Affero General Public License v3.0
def transform_coordinates(self, coordinates, argument=None):
        """
        Transform coordinates. This method is called to move atoms.
        This method must be overloaded in all MoveGenerator sub-classes.

        :Parameters:
            #. coordinates (np.ndarray): The coordinates on which to apply
               the move.
            #. argument (object): Any other argument needed to perform the
               move. In general it is not needed.

        :Returns:
            #. coordinates (np.ndarray): The new coordinates after applying
               the move.
        """
        raise Exception(LOGGER.impl("%s '%s' method must be overloaded"%(self.__class__.__name__,inspect.stack()[0][3]))) 
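The raise above uses inspect.stack()[0][3] to put the name of the currently executing method into the error message, so any MoveGenerator subclass that forgets to override transform_coordinates reports the right method name. A minimal, self-contained sketch of the same pattern (the fullrmc LOGGER and base-class machinery are omitted, and the subclass shown here is hypothetical):

import inspect

class MoveGenerator(object):
    def transform_coordinates(self, coordinates, argument=None):
        # inspect.stack()[0][3] is the name of the method currently running,
        # so the message stays correct even if the method is renamed.
        raise NotImplementedError("%s '%s' method must be overloaded"
                                  % (self.__class__.__name__, inspect.stack()[0][3]))

class ShiftGenerator(MoveGenerator):
    def transform_coordinates(self, coordinates, argument=None):
        # Hypothetical override: translate every coordinate by a fixed offset.
        return [(x + 0.1, y + 0.1, z + 0.1) for (x, y, z) in coordinates]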
Example #3
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def create_resource_stack(cls, resource_stack_name):
        try:
            cls.logger.test("Creating test resources stack {}", resource_stack_name)
            ami = Ec2(region()).latest_aws_linux_image["ImageId"]
            resource_stack = Stack(resource_stack_name, region=region())

            stack_parameters = {
                "InstanceAmi": ami, "InstanceType": "t2.micro",
                "TaskListTagName": tasklist_tagname(TESTED_ACTION),
                "TaskListTagValueNoCPULoad": "test_instance_no_cpu_load",
                "TaskListTagValueCPULoad": "test_instance_cpu_load"
            }

            resource_stack.create_stack(template_file=template_path(__file__, TEST_RESOURCES_TEMPLATE),
                                        iam_capability=True,
                                        params=stack_parameters)
            return resource_stack
        except Exception as ex:
            cls.logger.test("Error creating stack {}, {}", resource_stack_name, ex)
            return None 
Example #4
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def create_resource_stack(cls, resource_stack_name):
        try:
            cls.logger.test("Creating test resources stack {}", resource_stack_name)
            ami = Ec2(region()).latest_aws_linux_image["ImageId"]
            resource_stack = Stack(resource_stack_name, region=region())
            resource_stack.create_stack(template_file=template_path(__file__, TEST_RESOURCES_TEMPLATE),
                                        timeout=1200,
                                        iam_capability=True,
                                        params={
                                            "InstanceAmi": ami,
                                            "InstanceType": TEST_INSTANCE_TYPES[0]
                                        })
            return resource_stack
        except Exception as ex:
            cls.logger.test("Error creating stack {}, {}", resource_stack_name, ex)
            return None 
Example #5
Source File: adventure.py    From Dumb-Cogs with MIT License
def install_words(game):
    # stack()[0] is this; stack()[1] is adventure.play(); so, stack()[2]
    namespace = inspect.stack()[2][0].f_globals
    words = [ k for k in game.vocabulary if isinstance(k, str) ]
    words.append('yes')
    words.append('no')
    for word in words:
        identifier = ReprTriggeredPhrase(game, [ word ])
        namespace[word] = identifier
        if len(word) > 5:
            namespace[word[:5]] = identifier
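The comment in this example spells out the indexing: each entry returned by inspect.stack() is a frame record whose element 0 is the frame object itself, so stack()[2][0].f_globals is the global namespace of the caller two levels up, and assigning into it injects new names there. A small, self-contained sketch of the same trick, with the game objects replaced by a plain string:

import inspect

def install_greeting():
    # stack()[0] is this function; stack()[1] is whoever called it.
    # Element 0 of a frame record is the frame object.
    caller_globals = inspect.stack()[1][0].f_globals
    caller_globals["greeting"] = "hello from install_greeting()"

install_greeting()
print(greeting)  # 'greeting' was injected into this module's globals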
Example #6
Source File: utils.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def logging_config(name=None, level=logging.DEBUG, console_level=logging.DEBUG):
    if name is None:
        name = inspect.stack()[1][1].split('.')[0]
    folder = os.path.join(os.getcwd(), name)
    if not os.path.exists(folder):
        os.makedirs(folder)
    logpath = os.path.join(folder, name + ".log")
    print("All Logs will be saved to %s"  %logpath)
    logging.root.setLevel(level)
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    logfile = logging.FileHandler(logpath)
    logfile.setLevel(level)
    logfile.setFormatter(formatter)
    logging.root.addHandler(logfile)
    #TODO Update logging patterns in other files
    logconsole = logging.StreamHandler()
    logconsole.setLevel(console_level)
    logconsole.setFormatter(formatter)
    logging.root.addHandler(logconsole)
    return folder 
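Here inspect.stack()[1][1] reads field 1 of the caller's frame record, which is the caller's source filename; stripping the extension gives a default log folder named after the calling script. A minimal sketch of just that lookup (using os.path rather than a plain split on '.', a slight variation on the snippet above):

import inspect
import os

def default_log_name():
    # stack()[1] is the caller's frame record; field 1 is its filename.
    caller_file = inspect.stack()[1][1]
    return os.path.splitext(os.path.basename(caller_file))[0]

# Called from a script named train_model.py this would return "train_model".
print(default_log_name())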
Example #7
Source File: Constraint.py    From fullrmc with GNU Affero General Public License v3.0
def set_used(self, value, frame=None):
        """
        Set used flag.

        :Parameters:
            #. value (boolean): True to use this constraint in stochastic
               engine runtime.
            #. frame (None, string): Target frame name. If None, engine used
               frame is used. If multiframe is given, all subframes will be
               targeted. If subframe is given, all other multiframe subframes
               will be targeted.
        """
        assert isinstance(value, bool), LOGGER.error("value must be boolean")
        # get used frame
        if self.engine is not None:
            print(self.engine)
            print(self.engine.usedFrame)
        usedIncluded, frame, allFrames = get_caller_frames(engine=self.engine,
                                                           frame=frame,
                                                           subframeToAll=True,
                                                           caller="%s.%s"%(self.__class__.__name__,inspect.stack()[0][3]) )
        if usedIncluded:
            self.__used = value
        for frm in allFrames:
            self._dump_to_repository({'_Constraint__used' :value}, frame=frm) 
Example #8
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def create_resource_stack(cls, resource_stack_name):
        try:
            cls.logger.test("Creating test resources stack {}", resource_stack_name)
            ami = Ec2(region()).latest_aws_linux_image["ImageId"]
            resource_stack = Stack(resource_stack_name, region=region())
            resource_stack.create_stack(template_file=template_path(__file__, TEST_RESOURCES_TEMPLATE), iam_capability=True,
                                        params={
                                            "InstanceAmi": ami,
                                            "InstanceType": "t2.micro",
                                            "TaskListTagName": tasklist_tagname(TESTED_ACTION),
                                            "TaskListTagValue": ",".join(cls.get_methods())
                                        })
            return resource_stack
        except Exception as ex:
            cls.logger.test("Error creating stack {}, {}", resource_stack_name, ex)
            return None 
Example #9
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def test_scale_up_not_avail_next(self):
        self.do_test_replace(test_method=inspect.stack()[0][3],
                             load_balancing=False,
                             mode=replace_instance.REPLACE_BY_STEP,
                             tags={"scaling": "up"},
                             multiple_volumes=False,
                             replaced_type=TEST_INSTANCE_TYPES[0],
                             unavailable_types=[TEST_INSTANCE_TYPES[1]],
                             expected_new_type=TEST_INSTANCE_TYPES[2]) 
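Examples #9 through #30 all rely on the same idiom: each test method passes inspect.stack()[0][3], which is its own name, as the test_method or task name, so the harness can tag AWS resources with the test that created them. A stripped-down illustration independent of the aws-ops-automator harness (note that unittest also exposes the same value as self._testMethodName):

import inspect
import unittest

class ReplaceInstanceTest(unittest.TestCase):
    def test_scale_up(self):
        # Inside a method, stack()[0][3] is the method's own name.
        test_method = inspect.stack()[0][3]
        self.assertEqual("test_scale_up", test_method)
        self.assertEqual(self._testMethodName, test_method)

if __name__ == "__main__":
    unittest.main()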
Example #10
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def test_step_down_no_next(self):
        self.do_test_resize(test_method=inspect.stack()[0][3],
                            resize_mode=r.RESIZE_BY_STEP,
                            tags={"scaling": "down"},
                            start_type=TEST_INSTANCE_TYPES[2],
                            try_next_in_range=False,
                            scaling_range=TEST_INSTANCE_TYPES,
                            unavailable_types=TEST_INSTANCE_TYPES[1:2],
                            expected_type=TEST_INSTANCE_TYPES[2]) 
Example #11
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def test_scale_up_not_avail(self):
        self.do_test_replace(test_method=inspect.stack()[0][3],
                             load_balancing=False,
                             mode=replace_instance.REPLACE_BY_STEP,
                             tags={"scaling": "up"},
                             multiple_volumes=False,
                             replaced_type=TEST_INSTANCE_TYPES[0],
                             unavailable_types=TEST_INSTANCE_TYPES[1:],
                             expected_new_type=TEST_INSTANCE_TYPES[0]) 
Example #12
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def test_scale_up_assumed(self):
        self.do_test_replace(test_method=inspect.stack()[0][3],
                             load_balancing=False,
                             mode=replace_instance.REPLACE_BY_STEP,
                             tags={"scaling": "up"},
                             multiple_volumes=False,
                             replaced_type="t2.nano",
                             assumed_type=TEST_INSTANCE_TYPES[0],
                             expected_new_type=TEST_INSTANCE_TYPES[1]) 
Example #13
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def test_create_snapshots_volume_with_tagfilter(self):

        try:
            parameters = {
                create_snapshot_action.PARAM_BACKUP_DATA_DEVICES: True,
                create_snapshot_action.PARAM_BACKUP_ROOT_DEVICE: False,
                create_snapshot_action.PARAM_VOLUME_TAG_FILTER: "Backup=True",
            }

            test_method = inspect.stack()[0][3]

            from datetime import timedelta
            self.logger.test("Running task")
            self.task_runner.run(parameters,
                                 task_name=test_method,
                                 complete_check_polling_interval=15)
            self.assertTrue(self.task_runner.success(), "Task executed successfully")
            self.logger.test("[X] Task completed")

            self.logger.test("Checking data snapshot")
            volume_snapshots = getattr(self.task_runner.results[0], "ActionStartResult", {}).get("volumes", {})
            self.created_snapshots = [volume_snapshots[i]["snapshot"] for i in volume_snapshots]
            self.assertEqual(1, len(volume_snapshots), "[X] Volume snapshots created")

            snapshot = self.ec2.get_snapshot(self.created_snapshots[0])
            snapshot_volume_tags = self.ec2.get_volume_tags(snapshot["VolumeId"])
            self.assertTrue(TagFilterExpression("Backup=True").is_match(snapshot_volume_tags),
                            "Snapshot is for selected data volume")
            self.logger.test("[X] Snapshot is for selected data volumes")

        finally:
            self.delete_snapshots() 
Example #14
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def test_load_balanced_same_size_no_replace(self):
        self.do_test_replace(test_method=inspect.stack()[0][3],
                             load_balancing=True,
                             same_type=True,
                             mode=replace_instance.REPLACE_BY_SPECIFIED_TYPE,
                             replace_if_same_type=False,
                             expected_new_type=TEST_INSTANCE_TYPES[0]) 
Example #15
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def test_load_balanced_same_size_replace(self):
        self.do_test_replace(test_method=inspect.stack()[0][3],
                             load_balancing=True,
                             same_type=True,
                             mode=replace_instance.REPLACE_BY_SPECIFIED_TYPE,
                             replace_if_same_type=False,
                             expected_new_type=TEST_INSTANCE_TYPES[0]) 
Example #16
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def test_create_snapshots_root_volume_only(self):

        try:
            parameters = {
                create_snapshot_action.PARAM_BACKUP_DATA_DEVICES: False,
                create_snapshot_action.PARAM_BACKUP_ROOT_DEVICE: True,
            }

            test_method = inspect.stack()[0][3]

            self.logger.test("Running task")
            self.task_runner.run(parameters, task_name=test_method, complete_check_polling_interval=15)
            self.assertTrue(self.task_runner.success(), "Task executed successfully")
            self.logger.test("[X] Task completed")

            self.logger.test("Checking root volume snapshot")
            volume_snapshots = getattr(self.task_runner.results[0], "ActionStartResult", {}).get("volumes", {})
            self.created_snapshots = [volume_snapshots[i]["snapshot"] for i in volume_snapshots]
            self.assertEqual(1, len(volume_snapshots), "[X] Single snapshot created")

            snapshot = self.ec2.get_snapshot(snapshot_id=self.created_snapshots[0])
            self.assertEqual(self.root_volume, snapshot["VolumeId"], "Snapshot is for root volume")
            self.logger.test("[X] Snapshot is for root device created")

        finally:
            self.delete_snapshots() 
Example #17
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def test_create_snapshots_data_volumes_only(self):

        try:
            parameters = {
                create_snapshot_action.PARAM_BACKUP_DATA_DEVICES: True,
                create_snapshot_action.PARAM_BACKUP_ROOT_DEVICE: False,
            }

            test_method = inspect.stack()[0][3]

            self.logger.test("Running task")
            self.task_runner.run(parameters, task_name=test_method, complete_check_polling_interval=30)
            self.assertTrue(self.task_runner.success(), "Task executed successfully")
            self.logger.test("[X] Task completed")

            self.logger.test("Checking data volume snapshots")
            volume_snapshots = getattr(self.task_runner.results[0], "ActionStartResult", {}).get("volumes", {})
            self.created_snapshots = [volume_snapshots[i]["snapshot"] for i in volume_snapshots]
            self.assertEqual(2, len(volume_snapshots), "[X] Data volume snapshots created")

            for snapshot_id in self.created_snapshots:
                snapshot = self.ec2.get_snapshot(snapshot_id=snapshot_id)
                self.assertIn(snapshot["VolumeId"], self.data_volumes, "Snapshot is for data volume")
            self.logger.test("[X] Snapshots are for data volumes")

        finally:
            self.delete_snapshots() 
Example #18
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def test_instance_cpu_load(self):

        self.base_test(test_method=inspect.stack()[0][3],
                       instance_id=self.instance_cpu_load,
                       expect_under_utilized=False,
                       expect_over_utilized=True,
                       cpu_low=5,
                       cpu_high=10,
                       interval="0/{} * * * ?".format(INTERVAL_MINUTES)) 
Example #19
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def test_copy_snapshot_description(self):

        test_method = inspect.stack()[0][3]

        self.cleanup_leftover_source_snapshots(test_method)

        self.logger.test("Creating source snapshot")
        source_snapshot = self.ec2.create_snapshot(self.volume_unencrypted,
                                                   tags={
                                                       "Name": "Ec2CopySnapshot_{}".format(test_method),
                                                       tasklist_tagname(TESTED_ACTION): test_method
                                                   }, description="Snapshot for testing Ec2CopySnapshot : {}".format(test_method))
        self.snapshots.append(source_snapshot["SnapshotId"])

        parameters = {
            copy_snapshot.PARAM_DESTINATION_REGION: region(),
            copy_snapshot.PARAM_COPIED_SNAPSHOTS: copy_snapshot.COPIED_OWNED_BY_ACCOUNT,
            copy_snapshot.PARAM_DELETE_AFTER_COPY: False,
            copy_snapshot.PARAM_ENCRYPTED: False
        }

        self.logger.test("Running task")
        self.task_runner.run(parameters,
                             task_name=test_method,
                             complete_check_polling_interval=10)
        self.assertTrue(self.task_runner.success(), "Task executed successfully")
        snapshot_copy_id = self.task_runner.results[0].result["copy-snapshot-id"]
        self.snapshots.append(snapshot_copy_id)
        self.logger.test("[X] Task completed")

        copied_snapshot = self.ec2.get_snapshot(snapshot_copy_id)
        self.assertEqual(source_snapshot.get("Description"), copied_snapshot.get("Description"), "Description copied as default")
        self.logger.test("[X]Source description copied") 
Example #20
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def test_scale_up_already_largest(self):
        largest_size = TEST_INSTANCE_TYPES[-1]
        self.do_test_replace(test_method=inspect.stack()[0][3],
                             load_balancing=False,
                             mode=replace_instance.REPLACE_BY_STEP,
                             tags={"scaling": "up"},
                             multiple_volumes=False,
                             replaced_type=largest_size,
                             expected_new_type=largest_size) 
Example #21
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def test_scale_up(self):
        self.do_test_replace(test_method=inspect.stack()[0][3],
                             load_balancing=False,
                             mode=replace_instance.REPLACE_BY_STEP,
                             tags={"scaling": "up"},
                             multiple_volumes=False,
                             expected_new_type=TEST_INSTANCE_TYPES[2]) 
Example #22
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def test_scale_down(self):
        self.do_test_replace(test_method=inspect.stack()[0][3],
                             load_balancing=False,
                             mode=replace_instance.REPLACE_BY_STEP,
                             tags={"scaling": "down"},
                             multiple_volumes=False,
                             expected_new_type=TEST_INSTANCE_TYPES[0]) 
Example #23
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def test_step_down_alternative_type(self):
        self.do_test_resize(test_method=inspect.stack()[0][3],
                            resize_mode=r.RESIZE_BY_STEP,
                            tags={"scaling": "down"},
                            start_type=TEST_INSTANCE_TYPES[2],
                            try_next_in_range=True,
                            scaling_range=TEST_INSTANCE_TYPES,
                            unavailable_types=TEST_INSTANCE_TYPES[1:2],
                            expected_type=TEST_INSTANCE_TYPES[0]) 
Example #24
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def test_step_down_already_at_smallest(self):
        smallest_size = TEST_INSTANCE_TYPES[0]
        self.do_test_resize(test_method=inspect.stack()[0][3],
                            resize_mode=r.RESIZE_BY_STEP,
                            tags={"scaling": "down"},
                            start_type=smallest_size,
                            scaling_range=TEST_INSTANCE_TYPES,
                            expected_type=smallest_size) 
Example #25
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def test_step_down(self):
        self.do_test_resize(test_method=inspect.stack()[0][3],
                            resize_mode=r.RESIZE_BY_STEP,
                            tags={"scaling": "down"},
                            start_type=TEST_INSTANCE_TYPES[1],
                            scaling_range=TEST_INSTANCE_TYPES,
                            expected_type=TEST_INSTANCE_TYPES[0]) 
Example #26
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def test_up_not_assumed(self):
        self.do_test_resize(test_method=inspect.stack()[0][3],
                            resize_mode=r.RESIZE_BY_STEP,
                            tags={"scaling": "up"},
                            start_type="t2.nano",
                            scaling_range=TEST_INSTANCE_TYPES,
                            expected_type="t2.nano") 
Example #27
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def test_step_up_no_next(self):
        self.do_test_resize(test_method=inspect.stack()[0][3],
                            resize_mode=r.RESIZE_BY_STEP,
                            tags={"scaling": "up"},
                            start_type=TEST_INSTANCE_TYPES[0],
                            try_next_in_range=False,
                            scaling_range=TEST_INSTANCE_TYPES,
                            unavailable_types=TEST_INSTANCE_TYPES[1:2],
                            expected_type=TEST_INSTANCE_TYPES[0]) 
Example #28
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def test_step_up_no_alternative_avail(self):

        self.do_test_resize(test_method=inspect.stack()[0][3],
                            resize_mode=r.RESIZE_BY_STEP,
                            tags={"scaling": "up"},
                            start_type=TEST_INSTANCE_TYPES[0],
                            try_next_in_range=True,
                            scaling_range=TEST_INSTANCE_TYPES,
                            unavailable_types=TEST_INSTANCE_TYPES[1:],
                            expected_type=TEST_INSTANCE_TYPES[0]) 
Example #29
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def test_step_up_alternative_type(self):

        self.do_test_resize(test_method=inspect.stack()[0][3],
                            resize_mode=r.RESIZE_BY_STEP,
                            tags={"scaling": "up"},
                            start_type=TEST_INSTANCE_TYPES[0],
                            try_next_in_range=True,
                            scaling_range=TEST_INSTANCE_TYPES,
                            unavailable_types=TEST_INSTANCE_TYPES[1:2],
                            expected_type=TEST_INSTANCE_TYPES[2]) 
Example #30
Source File: test_action.py    From aws-ops-automator with Apache License 2.0
def test_step_up_already_at_largest(self):
        self.do_test_resize(test_method=inspect.stack()[0][3],
                            resize_mode=r.RESIZE_BY_STEP,
                            tags={"scaling": "up"},
                            start_type=TEST_INSTANCE_TYPES[-1],
                            scaling_range=TEST_INSTANCE_TYPES,
                            expected_type=TEST_INSTANCE_TYPES[-1])