Python pytest.fixture() Examples

The following are 30 code examples of pytest.fixture(), each taken from an open source project. The source file, project, and license are noted above each snippet. You may also want to check out the other available functions and classes of the pytest module.
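Several of the snippets below appear to be fixture bodies shown without the @pytest.fixture decorator line, and others are tests that consume fixtures rather than define them. For reference, here is a minimal, self-contained sketch of how a fixture is declared and then injected into a test by naming it as a parameter (the names are illustrative only):

import pytest

@pytest.fixture
def numbers():
    """A minimal fixture returning shared test data."""
    return [1, 2, 3]

def test_sum(numbers):
    # pytest matches the parameter name to the fixture and injects its return value.
    assert sum(numbers) == 6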
Example #1
Source File: test_schema_validation.py    From drydock with Apache License 2.0
def _test_validate(self, schema, expect_failure, input_files, input):
        """validates input yaml against schema.

        :param schema: schema yaml file
        :param expect_failure: should the validation pass or fail.
        :param input_files: pytest fixture used to access the test input files
        :param input: test input yaml doc filename"""
        schema_dir = pkg_resources.resource_filename('drydock_provisioner',
                                                     'schemas')
        schema_filename = os.path.join(schema_dir, schema)
        schema_file = open(schema_filename, 'r')
        schema = yaml.safe_load(schema_file)

        input_file = input_files.join(input)
        instance_file = open(str(input_file), 'r')
        instance = yaml.safe_load(instance_file)

        if expect_failure:
            with pytest.raises(ValidationError):
                jsonschema.validate(instance['spec'], schema['data'])
        else:
            jsonschema.validate(instance['spec'], schema['data']) 
Example #2
Source File: conftest.py    From mutatest with MIT License
def mock_coverage_file(tmp_path_factory):
    """Mock .coverage file to read into the CoverageOptimizer."""

    folder = tmp_path_factory.mktemp("cov")

    # aligned to fixture mock_source_and_targets for file3.py used in positive filter.
    mock_contents = {
        "file1.py": [1],
        "file2.py": [1, 3, 4],
        "file3.py": [1, 2, 4],
    }

    mock_cov_file = folder / ".coverage"
    str_cov_file = str(mock_cov_file.resolve())
    write_cov_file(mock_contents, str_cov_file)

    yield mock_cov_file

    mock_cov_file.unlink() 
Example #3
Source File: conftest.py    From mutatest with MIT License
def mock_source_and_targets():
    """Mock source file with uncovered/covered targets to use with mock_coverage_file.

    Covered lines include: 1, 2, 4
    """
    # see mock_coverage_file fixture
    source_file = Path("file3.py")
    targets = {
        LocIndex(ast_class="AugAssign", lineno=1, col_offset=1, op_type="o"),
        LocIndex(ast_class="AugAssign", lineno=2, col_offset=1, op_type="o"),
        LocIndex(ast_class="AugAssign", lineno=3, col_offset=1, op_type="o"),
        LocIndex(ast_class="BinOp", lineno=4, col_offset=1, op_type="o"),
        LocIndex(ast_class="BinOp", lineno=5, col_offset=1, op_type="o"),
    }
    return SourceAndTargets(source_file, targets)


####################################################################################################
# TRANSFORMERS: AUGASSIGN FIXTURES
#################################################################################################### 
Example #4
Source File: test_api_builddata.py    From drydock with Apache License 2.0
def test_read_builddata_all(self, falcontest, seeded_builddata):
        """Test that by default the API returns all build data for a node."""
        url = '/api/v1.0/nodes/foo/builddata'

        # Seed the database with build data
        nodelist = ['foo']
        count = 3
        seeded_builddata(nodelist=nodelist, count=count)

        # TODO(sh8121att) Make fixture for request header forging
        hdr = {
            'Content-Type': 'application/json',
            'X-IDENTITY-STATUS': 'Confirmed',
            'X-USER-NAME': 'Test',
            'X-ROLES': 'admin'
        }

        resp = falcontest.simulate_get(url, headers=hdr)

        assert resp.status == falcon.HTTP_200

        resp_body = resp.json

        assert len(resp_body) == count 
Example #5
Source File: test_api_tasks.py    From drydock with Apache License 2.0
def test_create_task(self, falcontest, blank_state):
        url = '/api/v1.0/tasks'

        req_hdr = {
            'Content-Type': 'application/json',
            'X-IDENTITY-STATUS': 'Confirmed',
            'X-USER-NAME': 'Test',
            'X-ROLES': 'admin',
        }

        json_body = json.dumps({
            'action': 'verify_site',
            'design_ref': 'http://foo.com',
        })

        resp = falcontest.simulate_post(url, headers=req_hdr, body=json_body)

        assert resp.status == falcon.HTTP_201
        assert resp.headers.get('Location') is not None

    # TODO(sh8121att) Make this a general fixture in conftest.py 
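The TODO above (and the identical header block in Example #4) suggests extracting the forged identity headers into a shared conftest.py fixture. A possible sketch, with a hypothetical fixture name:

import pytest

@pytest.fixture
def admin_headers():
    """Hypothetical conftest.py fixture providing the forged admin identity headers."""
    return {
        'Content-Type': 'application/json',
        'X-IDENTITY-STATUS': 'Confirmed',
        'X-USER-NAME': 'Test',
        'X-ROLES': 'admin',
    }

A test could then request admin_headers as a parameter instead of rebuilding the dict inline.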
Example #6
Source File: conftest.py    From gql with MIT License
async def ws_ssl_server(request):
    """Websockets server fixture using SSL.

    It can take as argument either a handler function for the websocket server for
    complete control OR an array of answers to be sent by the default server handler.
    """

    server_handler = get_server_handler(request)

    try:
        test_server = WebSocketServer(with_ssl=True)

        # Starting the server with the fixture param as the handler function
        await test_server.start(server_handler)

        yield test_server
    except Exception as e:
        print("Exception received in ws server fixture:", e)
    finally:
        await test_server.stop() 
Example #7
Source File: conftest.py    From gql with MIT License
async def server(request):
    """Fixture used to start a dummy server to test the client behaviour.

    It can take as argument either a handler function for the websocket server for
    complete control OR an array of answers to be sent by the default server handler.
    """

    server_handler = get_server_handler(request)

    try:
        test_server = WebSocketServer()

        # Starting the server with the fixture param as the handler function
        await test_server.start(server_handler)

        yield test_server
    except Exception as e:
        print("Exception received in server fixture:", e)
    finally:
        await test_server.stop() 
Example #8
Source File: conftest.py    From goodtables-py with MIT License
def log():
    def fixture(struct):
        # Pack errors/report to tuples list log:
        # - format for errors: (row-number, column-number, code)
        # - format for report: (table-number, row-number, column-number, code)
        result = []
        def pack_error(error, table_number='skip'):
            error = dict(error)
            error = [
                error.get('row-number'),
                error.get('column-number'),
                error.get('code'),
            ]
            if table_number != 'skip':
                error = [table_number] + error
            return tuple(error)
        if isinstance(struct, list):
            for error in struct:
                result.append(pack_error(error))
        if isinstance(struct, dict):
            for table_number, table in enumerate(struct['tables'], start=1):
                for error in table['errors']:
                    result.append(pack_error(error, table_number))
        return result
    return fixture 
Example #9
Source File: test_course_monthly_metrics_viewset.py    From figures with MIT License
def sog_data():
    """Fixture to create site, organization, and course overview

    This fixture exists mostly to help abstract multisite handing from tests

    Returns a dict of 'site', 'organization', and 'course_overview' objects
    """
    site = SiteFactory()
    course_overview = CourseOverviewFactory()
    if organizations_support_sites():
        organization = OrganizationFactory(sites=[site])
    else:
        organization = OrganizationFactory()
    OrganizationCourseFactory(organization=organization,
                              course_id=str(course_overview.id))
    return dict(
        site=site,
        organization=organization,
        course_overview=course_overview) 
Example #10
Source File: test_general_course_data_view.py    From figures with MIT License
def test_get_with_course_id_for_other_site(self):
        """
        This tests if the course can't be found in the organization

        This test is incomplete
        """
        with mock.patch('figures.helpers.settings.FEATURES', {'FIGURES_IS_MULTISITE': True}):
            assert figures.helpers.is_multisite()

            # Stand up other site. Candidate for a fixture
            other_site = SiteFactory(domain='other.site')
            other_org = OrganizationFactory(sites=[other_site])
            course = CourseOverviewFactory(org=other_org.short_name)

            request = APIRequestFactory().get(self.request_path)
            force_authenticate(request, user=self.staff_user)
            view = self.view_class.as_view({'get': 'retrieve'})
            response = view(request, pk=str(course.id))
            assert response.status_code == 403 
Example #11
Source File: test_freezegun.py    From pytest-freezegun with MIT License
def test_freezing_time_in_fixture(testdir):
    testdir.makepyfile("""
        import pytest
        from datetime import date, datetime

        @pytest.fixture
        def today():
            return datetime.now().date()

        @pytest.mark.freeze_time('2017-05-20 15:42')
        def test_sth(today):
            assert today == date(2017, 5, 20)
    """)

    result = testdir.runpytest('-v', '-s')
    assert result.ret == 0 
Example #12
Source File: test_qtutils.py    From qutebrowser with GNU General Public License v3.0
def test_version_check(monkeypatch, qversion, compiled, pyqt, version, exact,
                       expected):
    """Test for version_check().

    Args:
        monkeypatch: The pytest monkeypatch fixture.
        qversion: The version to set as fake qVersion().
        compiled: The value for QT_VERSION_STR (set compiled=False)
        pyqt: The value for PYQT_VERSION_STR (set compiled=False)
        version: The version to compare with.
        exact: Use exact comparing (==)
        expected: The expected result.
    """
    monkeypatch.setattr(qtutils, 'qVersion', lambda: qversion)
    if compiled is not None:
        monkeypatch.setattr(qtutils, 'QT_VERSION_STR', compiled)
        monkeypatch.setattr(qtutils, 'PYQT_VERSION_STR', pyqt)
        compiled_arg = True
    else:
        compiled_arg = False

    actual = qtutils.version_check(version, exact, compiled=compiled_arg)
    assert actual == expected 
Example #13
Source File: conftest.py    From normandy with Mozilla Public License 2.0
def migrations(transactional_db):
    """
    This fixture returns a helper object to test Django data migrations.
    Based on: https://gist.github.com/bennylope/82a6088c02fefdd47e18f3c04ec167af
    """

    class Migrator(object):
        def migrate(self, app, to):
            migration = [(app, to)]
            executor = MigrationExecutor(connection)
            executor.migrate(migration)
            return executor.loader.project_state(migration).apps

        def reset(self):
            call_command("migrate", no_input=True)

    return Migrator() 
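The migrations fixture above exposes migrate() and reset() helpers; a hypothetical test using it might look like the following (the app label, migration name, and model are placeholders, not from the original project):

def test_data_migration_backfill(migrations):
    # Rewind to the state just before the data migration under test.
    old_apps = migrations.migrate("myapp", "0001_initial")
    OldModel = old_apps.get_model("myapp", "MyModel")
    assert OldModel.objects.count() == 0

    # Restore the database to the latest migration state afterwards.
    migrations.reset()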
Example #14
Source File: conftest.py    From brownie with MIT License
def pytest_addoption(parser):
    parser.addoption(
        "--target",
        choices=["core", "pm", "plugin"],
        default="core",
        help="Target a specific component of the tests.",
    )
    parser.addoption(
        "--evm",
        nargs=3,
        metavar=("solc_versions", "evm_rulesets", "optimizer_runs"),
        help="Run evm tests against a matrix of solc versions, evm versions, and compiler runs.",
    )


# remove tests based on config flags and fixture names 
Example #15
Source File: test_coverage.py    From brownie with MIT License
def test_always_transact(plugintester, mocker, rpc):
    mocker.spy(rpc, "undo")

    result = plugintester.runpytest()
    result.assert_outcomes(passed=1)
    assert rpc.undo.call_count == 0

    # with coverage eval
    result = plugintester.runpytest("--coverage")
    result.assert_outcomes(passed=1)
    assert rpc.undo.call_count == 1

    # with coverage and no_call_coverage fixture
    plugintester.makeconftest(conf_source)
    result = plugintester.runpytest("--coverage")
    result.assert_outcomes(passed=1)
    assert rpc.undo.call_count == 1 
Example #16
Source File: test_moler_test.py    From moler with BSD 3-Clause "New" or "Revised" License
def test_exception_in_observer_is_raised_if_no_result_called_but_decorator_on_method(do_nothing_connection_observer,
                                                                                     ObserverExceptionClass):
    from moler.util.moler_test import MolerTest
    exc = ObserverExceptionClass("some error inside observer")

    class MyTest(object):
        @MolerTest.raise_background_exceptions()
        # @MolerTest.raise_background_exceptions  # doesn't work since it is created by python and given class as first argument
        #                                               # compare with syntax of @pytest.fixture  @pytest.yield_fixture
        def method_using_observer(self):
            observer = do_nothing_connection_observer
            observer.set_exception(exc)

    with pytest.raises(ExecutionException) as err:
        MyTest().method_using_observer()
    ConnectionObserver.get_unraised_exceptions() 
Example #17
Source File: test_shape_base.py    From recruit with Apache License 2.0
def block(self, request):
        # blocking small arrays and large arrays go through different paths.
        # the algorithm is triggered depending on the number of element
        # copies required.
        # We define a test fixture that forces most tests to go through
        # both code paths.
        # Ultimately, this should be removed if a single algorithm is found
        # to be faster for both small and large arrays.
        def _block_force_concatenate(arrays):
            arrays, list_ndim, result_ndim, _ = _block_setup(arrays)
            return _block_concatenate(arrays, list_ndim, result_ndim)

        def _block_force_slicing(arrays):
            arrays, list_ndim, result_ndim, _ = _block_setup(arrays)
            return _block_slicing(arrays, list_ndim, result_ndim)

        if request.param == 'force_concatenate':
            return _block_force_concatenate
        elif request.param == 'force_slicing':
            return _block_force_slicing
        elif request.param == 'block':
            return block
        else:
            raise ValueError('Unknown blocking request. There is a typo in the tests.') 
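The block fixture above branches on request.param, which implies it is declared with fixture parameters (for example @pytest.fixture(params=[...])) so that every dependent test runs once per blocking strategy. A small, self-contained illustration of that pattern, with made-up names rather than the original numpy ones:

import pytest

@pytest.fixture(params=["fast", "safe"])
def summer(request):
    """Parametrized fixture: request.param selects the implementation under test."""
    def fast(values):
        return sum(values)

    def safe(values):
        total = 0
        for value in values:
            total += value
        return total

    return fast if request.param == "fast" else safe

def test_summer(summer):
    # Runs twice, once per fixture param.
    assert summer([1, 2, 3]) == 6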
Example #18
Source File: test_cli.py    From mutatest with MIT License
def mock_TrialTimes():
    """Mock Trial Time fixture for the CLI."""
    return TrialTimes(
        clean_trial_1=timedelta(days=0, seconds=6, microseconds=0),
        clean_trial_2=timedelta(days=0, seconds=6, microseconds=0),
        mutation_trials=timedelta(days=0, seconds=6, microseconds=0),
    ) 
Example #19
Source File: conftest.py    From mutatest with MIT License
def mock_args(tmp_path_factory, binop_file):
    """Basic fixture with default settings using existing binop_file fixture."""

    folder = tmp_path_factory.mktemp("output")

    return MockArgs(
        blacklist=[],
        exclude=["__init__.py"],
        mode="s",
        nlocations=10,
        output=folder / "mock_mutation_report.rst",
        rseed=314,
        src=binop_file,
        testcmds=["pytest"],
        whitelist=[],
        exception=None,
        debug=False,
        nocov=True,
        parallel=False,
        timeout_factor=2,
    )


####################################################################################################
# FILTERS: MOCK COVERAGE FILE FIXTURES
#################################################################################################### 
Example #20
Source File: conftest.py    From mutatest with MIT License
def augassign_expected_locs():
    """The AugAssign expected location based on the fixture"""
    return [
        LocIndex(ast_class="AugAssign", lineno=2, col_offset=4, op_type="AugAssign_Add"),
        LocIndex(ast_class="AugAssign", lineno=3, col_offset=4, op_type="AugAssign_Sub"),
        LocIndex(ast_class="AugAssign", lineno=4, col_offset=4, op_type="AugAssign_Div"),
        LocIndex(ast_class="AugAssign", lineno=5, col_offset=4, op_type="AugAssign_Mult"),
    ]


####################################################################################################
# TRANSFORMERS: BINOP FIXTURES
# Used as a baseline fixture in many running tests
#################################################################################################### 
Example #21
Source File: conftest.py    From mutatest with MIT License
def boolop_expected_loc():
    """Expected location index of the boolop fixture"""
    # Py 3.7 vs 3.8
    end_lineno = None if sys.version_info < (3, 8) else 2
    end_col_offset = None if sys.version_info < (3, 8) else 18
    return LocIndex(
        ast_class="BoolOp",
        lineno=2,
        col_offset=11,
        op_type=ast.And,
        end_lineno=end_lineno,
        end_col_offset=end_col_offset,
    ) 
Example #22
Source File: conftest.py    From mutatest with MIT License
def compare_expected_locs():
    """The compare expected locations based on the fixture"""
    # Py 3.7
    if sys.version_info < (3, 8):
        return [
            LocIndex(ast_class="Compare", lineno=2, col_offset=11, op_type=ast.Eq),
            LocIndex(ast_class="CompareIs", lineno=5, col_offset=11, op_type=ast.Is),
            LocIndex(ast_class="CompareIn", lineno=8, col_offset=11, op_type=ast.In),
        ]
    # Py 3.8
    return [
        LocIndex(
            ast_class="Compare",
            lineno=2,
            col_offset=11,
            op_type=ast.Eq,
            end_lineno=2,
            end_col_offset=17,
        ),
        LocIndex(
            ast_class="CompareIs",
            lineno=5,
            col_offset=11,
            op_type=ast.Is,
            end_lineno=5,
            end_col_offset=17,
        ),
        LocIndex(
            ast_class="CompareIn",
            lineno=8,
            col_offset=11,
            op_type=ast.In,
            end_lineno=8,
            end_col_offset=17,
        ),
    ]


####################################################################################################
# TRANSFORMERS: IF FIXTURES
#################################################################################################### 
Example #23
Source File: test_run.py    From mutatest with MIT License
def test_run_mutation_trials_good_binop(
    bos, bod, exp_trials, parallel, single_binop_file_with_good_test, change_to_tmp
):
    """Slow test to run detection trials on a simple mutation on a binop.

    Based on the fixture, there is one Add operation with 6 substitutions (e.g.
    sub, div, mult, pow, mod, floordiv); therefore, 6 total trials are expected for a full run
    and 1 trial is expected when break on detected is used.

    Args:
        bos: break on survival
        bod: break on detection
        exp_trials: number of expected trials
        single_binop_file_with_good_test: fixture for single op with a good test
    """
    if sys.version_info < (3, 8) and parallel:
        pytest.skip("Under version 3.8 will not run parallel tests.")

    test_cmds = f"pytest {single_binop_file_with_good_test.test_file.resolve()}".split()

    config = Config(
        n_locations=100, break_on_survival=bos, break_on_detected=bod, multi_processing=parallel
    )

    results_summary = run.run_mutation_trials(
        single_binop_file_with_good_test.src_file.resolve(), test_cmds=test_cmds, config=config
    )

    assert len(results_summary.results) == exp_trials

    # in all trials the status should be detected
    for mutant_trial in results_summary.results:
        assert mutant_trial.return_code == 1
        assert mutant_trial.status == "DETECTED" 
Example #24
Source File: test_run.py    From mutatest with MIT License
def test_run_mutation_trials_bad_binop(
    bos, bod, exp_trials, parallel, single_binop_file_with_bad_test, change_to_tmp
):
    """Slow test to run detection trials on a simple mutation on a binop.

    Based on the fixture, there is one Add operation with 6 substitutions (e.g.
    sub, div, mult, pow, mod, floordiv); therefore, 6 total trials are expected for a full run
    and 1 trial is expected when break on detected is used.

    Args:
        bos: break on survival
        bod: break on detection
        exp_trials: number of expected trials
        single_binop_file_with_bad_test: fixture for single op with a bad test
    """
    if sys.version_info < (3, 8) and parallel:
        pytest.skip("Under version 3.8 will not run parallel tests.")

    test_cmds = f"pytest {single_binop_file_with_bad_test.test_file.resolve()}".split()

    config = Config(
        n_locations=100, break_on_survival=bos, break_on_detected=bod, multi_processing=parallel
    )

    results_summary = run.run_mutation_trials(
        single_binop_file_with_bad_test.src_file.resolve(), test_cmds=test_cmds, config=config
    )

    assert len(results_summary.results) == exp_trials

    # in all trials the status should be survivors
    for mutant_trial in results_summary.results:
        assert mutant_trial.return_code == 0
        assert mutant_trial.status == "SURVIVED" 
Example #25
Source File: test_api_tasks.py    From drydock with Apache License 2.0
def test_read_tasks(self, falcontest, blank_state):
        """Test that the tasks API responds with list of tasks."""
        url = '/api/v1.0/tasks'

        # TODO(sh8121att) Make fixture for request header forging
        hdr = {
            'Content-Type': 'application/json',
            'X-IDENTITY-STATUS': 'Confirmed',
            'X-USER-NAME': 'Test',
            'X-ROLES': 'admin'
        }

        resp = falcontest.simulate_get(url, headers=hdr)

        assert resp.status == falcon.HTTP_200 
Example #26
Source File: conftest.py    From gql with MIT License
async def client_and_server(server):
    """Helper fixture to start a server and a client connected to its port."""

    # Generate transport to connect to the server fixture
    path = "/graphql"
    url = f"ws://{server.hostname}:{server.port}{path}"
    sample_transport = WebsocketsTransport(url=url)

    async with Client(transport=sample_transport) as session:

        # Yield both client session and server
        yield session, server 
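Since client_and_server yields a (session, server) tuple from an async fixture, a consuming test would itself be asynchronous. A minimal sketch, assuming pytest-asyncio is installed and the test names are hypothetical:

import pytest

@pytest.mark.asyncio
async def test_client_connects(client_and_server):
    # Unpack the tuple yielded by the fixture above.
    session, server = client_and_server

    # The server fixture exposes the hostname and port the client connected to.
    assert server.port is not None
    assert session is not None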
Example #27
Source File: test_ffmpeg_adapter.py    From spleeter with MIT License
def adapter():
    """ Target test audio adapter fixture. """
    return get_default_audio_adapter() 
Example #28
Source File: test_ffmpeg_adapter.py    From spleeter with MIT License
def audio_data(adapter):
    """ Audio data fixture based on sample loading from adapter. """
    return adapter.load(
        TEST_AUDIO_DESCRIPTOR,
        TEST_OFFSET,
        TEST_DURATION,
        TEST_SAMPLE_RATE) 
Example #29
Source File: test_cookiecutter_generation.py    From cookiecutter-faust with MIT License
def test_project_generation(cookies, context, context_combination):
    """
    Test that project is generated and fully rendered.
    This is parametrized for each combination from the ``context_combination``
    fixture.
    """
    result = cookies.bake(extra_context={**context, **context_combination})
    assert result.exit_code == 0
    assert result.exception is None
    assert result.project.basename == context["project_slug"]
    assert result.project.isdir()

    paths = build_files_list(str(result.project))
    assert paths
    check_paths(paths) 
Example #30
Source File: test_course_monthly_metrics_viewset.py    From figures with MIT License
def course_test_data():
    """Temporary fixture. Will remove as we abstract testing
    """
    months_back = 6
    site = SiteFactory()
    course_overview = CourseOverviewFactory()
    if organizations_support_sites():
        org = OrganizationFactory(sites=[site])
    else:
        org = OrganizationFactory()

    OrganizationCourseFactory(organization=org,
                              course_id=str(course_overview.id))

    enrollments = [CourseEnrollmentFactory(
        course_id=course_overview.id) for i in range(3)]

    users = [enrollment.user for enrollment in enrollments]
    student_modules = []
    dates = generate_date_series(months_back=months_back)
    assert dates
    data_spec = zip(dates, range(months_back))

    return dict(
        site=site,
        org=org,
        users=users,
        course_overview=course_overview,
        enrollments=enrollments,
        student_modules=student_modules,
        months_back=months_back,
        dates=dates,
        data_spec=data_spec,
    )