diff --git a/CHANGELOG.md b/CHANGELOG.md
index f6e1a80b34947f6fa88a7fcd3536ff11e443c484..c48b3eb06bff69f59590bc226e48908a8c6411f1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,7 @@
 
 ### Added
 
+- Added support for repository rulesets. ([#53](https://gitlab.eclipse.org/eclipsefdn/security/otterdog/-/issues/53))
 - Added support for workflow settings of a repository. ([#113](https://gitlab.eclipse.org/eclipsefdn/security/otterdog/-/issues/113))
 - Added possibility to define custom hooks in the default configuration when adding new resource. ([#106](https://gitlab.eclipse.org/eclipsefdn/security/otterdog/-/issues/106))
 - Added validation for repos hosting the organization site, i.e. <org-id>.github.io. ([#83](https://gitlab.eclipse.org/eclipsefdn/security/otterdog/-/issues/83))
diff --git a/docs/reference/organization/repository/bypass-actor.md b/docs/reference/organization/repository/bypass-actor.md
new file mode 100644
index 0000000000000000000000000000000000000000..8d4724286716769e082516b6eb8a1928c7947960
--- /dev/null
+++ b/docs/reference/organization/repository/bypass-actor.md
@@ -0,0 +1,33 @@
+A Bypass Actor represents an entity that is allowed to bypass the restrictions set up by a specific ruleset.
+The following format is used to distinguish between the different types of actors that GitHub supports:
+
+| Actor Type | Format                        | Example                    |
+|------------|-------------------------------|----------------------------|
+| Role       | `#<role-name>`                | `#Maintain`                |
+| Team       | `@<organization>/<team-slug>` | `@OtterdogTest/committers` |
+| App        | `<app-slug>`                  | `eclipse-eca-validation`   |
+
+The following roles are currently supported:
+
+- Write
+- Maintain
+- RepositoryAdmin
+- OrganizationAdmin
+
+!!! note
+
+    Currently, GitHub does not support specifying individual users as bypass actors for a ruleset.
+
+## Bypass Mode
+
+If not specified, the bypass mode will be set to `always` by default, meaning that the specified bypass actor
+can bypass the restrictions of the ruleset in any case.
+
+However, it is also possible to limit the ability to bypass the ruleset only for pull requests:
+
+`<actor>[:<bypass-mode>]`
+
+| Bypass Mode    | Description                                                                      | Format                 | Example                                 |
+|----------------|----------------------------------------------------------------------------------|------------------------|-----------------------------------------|
+| _always_       | Allow the actor to always bypass the restrictions                                | `<actor>:always`       | `#Maintain:always`                      |
+| _pull_request_ | Allow the actor to only bypass the restrictions in the context of a pull request | `<actor>:pull_request` | `@OtterdogTest/committers:pull_request` |
diff --git a/docs/reference/organization/repository/ref-matcher.md b/docs/reference/organization/repository/ref-matcher.md
new file mode 100644
index 0000000000000000000000000000000000000000..018fc5f758efab98854a4d2a43f8be5f7f1c140a
--- /dev/null
+++ b/docs/reference/organization/repository/ref-matcher.md
@@ -0,0 +1,7 @@
+The following format is supported to match branches to be included / excluded in repository rulesets:
+
+| Description                 | Format              | Example                                         |
+|-----------------------------|---------------------|-------------------------------------------------|
+| match individual branch(es) | `refs/heads/<name>` | `refs/heads/main` or `refs/heads/releases/**/*` |
+| match the default branch    | `~DEFAULT_BRANCH`   | `~DEFAULT_BRANCH`                               |
+| match all branches          | `~ALL`              | `~ALL`                                          |
diff --git a/docs/reference/organization/repository/ruleset.md b/docs/reference/organization/repository/ruleset.md
new file mode 100644
index 0000000000000000000000000000000000000000..1a82860425e801817ce3ab106133e3ae591a25d8
--- /dev/null
+++ b/docs/reference/organization/repository/ruleset.md
@@ -0,0 +1,68 @@
+Definition of a Repository Ruleset; the following properties are supported:
+
+| Key                                 | Value                                  | Description                                                                                                                                                                              | Notes                                                                                                                             |
+|-------------------------------------|----------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------|
+| _name_                              | string                                 | The name of this repository ruleset                                                                                                                                                      |                                                                                                                                   |
+| _enforcement_                       | string                                 | The enforcement status of this ruleset                                                                                                                                                   | Possible values are `active`, `disabled` or `evaluate` (`evaluate` only available when enterprise billing is enabled)             |
+| _bypass_actors_                     | list\[[BypassActor](bypass-actor.md)\] | List of actors able to bypass this ruleset                                                                                                                                               |                                                                                                                                   |
+| _include_refs_                      | list\[[RefMatcher](ref-matcher.md)\]   | List of refs or patterns to include matching branches                                                                                                                                    |                                                                                                                                   |
+| _exclude_refs_                      | list\[[RefMatcher](ref-matcher.md)\]   | List of refs or patterns to exclude matching branches                                                                                                                                    |                                                                                                                                   |
+| _allows_creations_                  | boolean                                | If disabled, only allows users with bypass permission to create matching refs                                                                                                            | Only taken into account when `restricts_pushes` is enabled                                                                        |
+| _allows_deletions_                  | boolean                                | If disabled, only allows users with bypass permission to delete matching refs                                                                                                            |                                                                                                                                   |
+| _allows_updates_                    | boolean                                | If disabled, only allows users with bypass permission to push matching refs                                                                                                              | Only taken into account when `restricts_pushes` is enabled                                                                        |
+| _allows_force_pushes_               | boolean                                | If disabled, only allows users with bypass permission to force push matching refs                                                                                                        | This property takes precedence over `bypass_force_push_allowances` if enabled, allowing all users with write access to force push |
+| _requires_pull_request_             | boolean                                | If enabled, requires a pull request before merging. All commits must be made to a non-protected branch and submitted via a pull request before they can be merged into matching branches |                                                                                                                                   |
+| _required_approving_review_count_   | integer or null                        | If specified, pull requests targeting a matching branch require a number of approvals and no changes requested before they can be merged.                                                | Only taken into account when `requires_pull_request` is enabled, should be set to null when `requires_pull_request` is disabled   |
+| _dismisses_stale_reviews_           | boolean                                | If enabled, dismiss approved reviews automatically when a new commit is pushed                                                                                                           | Only taken into account when `requires_pull_request` is enabled                                                                   |
+| _requires_code_owner_review_        | boolean                                | If enabled, require an approved review in pull requests including files with a designated code owner                                                                                     | Only taken into account when `requires_pull_request` is enabled                                                                   |
+| _requires_last_push_approval_       | boolean                                | Whether the most recent push must be approved by someone other than the person who pushed it                                                                                             | Only taken into account when `requires_pull_request` is enabled                                                                   |
+| _requires_review_thread_resolution_ | boolean                                | If enabled, all conversations on code must be resolved before a pull request can be merged into a matching branch                                                                        | Only taken into account when `requires_pull_request` is enabled                                                                   |
+| _requires_status_checks_            | boolean                                | If enabled, status checks must pass before branches can be merged into a matching branch                                                                                                 |                                                                                                                                   |
+| _requires_strict_status_checks_     | boolean                                | If enabled, pull requests targeting a matching branch must have been tested with the latest code.                                                                                        | This setting will not take effect unless at least one status check is enabled                                                     |
+| _required_status_checks_            | list\[[StatusCheck](status-check.md)\] | List of status checks that must succeed before branches can be merged                                                                                                                    | Only taken into account when `requires_status_checks` is enabled                                                                  |
+| _requires_commit_signatures_        | boolean                                | If enabled, commits pushed to matching branches must have verified signatures                                                                                                            |                                                                                                                                   |
+| _requires_linear_history_           | boolean                                | If enabled, prevent merge commits from being pushed to matching branches                                                                                                                 |                                                                                                                                   |
+| _requires_deployments_              | boolean                                | If enabled, environments must be successfully deployed to before branches can be merged into a matching branch                                                                           |                                                                                                                                   |
+| _required_deployment_environments_  | list[string]                           | List of environments that must be successfully deployed to before branches can be merged                                                                                                 | Only taken into account when `requires_deployments` is enabled                                                                    |
+
+## Jsonnet Function
+
+``` jsonnet
+orgs.newRepoRuleset('<name>') {
+  <key>: <value>
+}
+```
+
+## Validation rules
+
+- setting `enforcement` to `evaluate` for an organization on a `free` plan triggers an error
+- enabling a setting that is only taken into account when another setting is enabled, triggers a warning, e.g. `dismisses_stale_reviews` is only valid when `requires_pull_request` is enabled
+- specifying an integer value for `required_approving_review_count` while `requires_pull_request` is disabled, triggers a warning, set it to `null` instead
+
+## Example usage
+
+=== "jsonnet"
+    ``` jsonnet
+    orgs.newOrg('adoptium') {
+      ...
+      _repositories+:: [
+        ...
+        orgs.newRepo('adoptium.net') {
+          description: "Adoptium Website",
+          homepage: "https://adoptium.net",
+          ...
+          rulesets: [
+            orgs.newRepoRuleset('main') {
+              bypass_actors+: [
+                "@adoptium/project-leads",
+              ],
+              include_refs+: [
+                "~DEFAULT_BRANCH"
+              ],
+              required_approving_review_count: 0,
+            },
+          ],
+        },
+      ],
+    }
+    ```
diff --git a/mkdocs.yml b/mkdocs.yml
index 86a2491bc58ab30ab0cf4965ea674f76212c7ce2..44c204f21afa37dd1b70baff443ef24325763e3d 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -58,6 +58,7 @@ nav:
           - Repository Secret: reference/organization/repository/secret.md
           - Environment: reference/organization/repository/environment.md
           - Branch Protection Rule: reference/organization/repository/branch-protection-rule.md
+          - Ruleset: reference/organization/repository/ruleset.md
     - CLI Operations:
         - reference/operations/index.md
         - Import: reference/operations/import.md
diff --git a/otterdog/jsonnet.py b/otterdog/jsonnet.py
index 46658ab84d9578b86fdd794f5cd25877e94b563a..c4179acfdc37462a52e42e526c7f2337fe1c9014 100644
--- a/otterdog/jsonnet.py
+++ b/otterdog/jsonnet.py
@@ -12,12 +12,7 @@ import subprocess
 
 from typing import Any
 
-from .utils import (
-    jsonnet_evaluate_snippet,
-    parse_template_url,
-    print_debug,
-    print_trace,
-)
+from .utils import jsonnet_evaluate_snippet, parse_template_url, print_debug, print_trace, print_warn
 
 
 class JsonnetConfig:
@@ -32,6 +27,7 @@ class JsonnetConfig:
     create_repo_webhook = "newRepoWebhook"
     create_repo_secret = "newRepoSecret"
     create_branch_protection_rule = "newBranchProtectionRule"
+    create_repo_ruleset = "newRepoRuleset"
     create_environment = "newEnvironment"
 
     def __init__(self, org_id: str, base_dir: str, base_template_url: str, local_only: bool):
@@ -57,7 +53,8 @@ class JsonnetConfig:
         self._default_repo_config: dict[str, Any] | None = None
         self._default_repo_webhook_config: dict[str, Any] | None = None
         self._default_repo_secret_config: dict[str, Any] | None = None
-        self._default_branch_config: dict[str, Any] | None = None
+        self._default_branch_protection_rule_config: dict[str, Any] | None = None
+        self._default_repo_ruleset_config: dict[str, Any] | None = None
         self._default_environment_config: dict[str, Any] | None = None
 
         self._initialized = False
@@ -85,50 +82,65 @@ class JsonnetConfig:
             # load the default org webhook config
             org_webhook_snippet = f"(import '{template_file}').{self.create_org_webhook}('default')"
             self._default_org_webhook_config = jsonnet_evaluate_snippet(org_webhook_snippet)
-        except RuntimeError as ex:
-            raise RuntimeError(f"failed to get default org webhook config: {ex}")
+        except RuntimeError:
+            print_warn("no default org webhook config found, webhooks will be skipped")
+            self._default_org_webhook_config = None
 
         try:
             # load the default org secret config
             org_secret_snippet = f"(import '{template_file}').{self.create_org_secret}('default')"
             self._default_org_secret_config = jsonnet_evaluate_snippet(org_secret_snippet)
-        except RuntimeError as ex:
-            raise RuntimeError(f"failed to get default org secret config: {ex}")
+        except RuntimeError:
+            print_warn("no default org secret config found, secrets will be skipped")
+            self._default_org_secret_config = None
 
         try:
             # load the default repo config
             repo_snippet = f"(import '{template_file}').{self.create_repo}('default')"
             self._default_repo_config = jsonnet_evaluate_snippet(repo_snippet)
-        except RuntimeError as ex:
-            raise RuntimeError(f"failed to get default repo config: {ex}")
+        except RuntimeError:
+            print_warn("no default repo config found, repos will be skipped")
+            self._default_repo_config = None
 
         try:
             # load the default repo webhook config
             repo_webhook_snippet = f"(import '{template_file}').{self.create_repo_webhook}('default')"
             self._default_repo_webhook_config = jsonnet_evaluate_snippet(repo_webhook_snippet)
-        except RuntimeError as ex:
-            raise RuntimeError(f"failed to get default repo webhook config: {ex}")
+        except RuntimeError:
+            print_warn("no default repo webhook config found, webhooks will be skipped")
+            self._default_repo_webhook_config = None
 
         try:
             # load the default repo secret config
             repo_secret_snippet = f"(import '{template_file}').{self.create_repo_secret}('default')"
             self._default_repo_secret_config = jsonnet_evaluate_snippet(repo_secret_snippet)
-        except RuntimeError as ex:
-            raise RuntimeError(f"failed to get default repo secret config: {ex}")
+        except RuntimeError:
+            print_warn("no default repo secret config found, secrets will be skipped")
+            self._default_repo_secret_config = None
 
         try:
             # load the default branch protection rule config
             branch_protection_snippet = f"(import '{template_file}').{self.create_branch_protection_rule}('default')"
-            self._default_branch_config = jsonnet_evaluate_snippet(branch_protection_snippet)
-        except RuntimeError as ex:
-            raise RuntimeError(f"failed to get default branch protection rule config: {ex}")
+            self._default_branch_protection_rule_config = jsonnet_evaluate_snippet(branch_protection_snippet)
+        except RuntimeError:
+            print_warn("no default branch protection rule config found, branch protection rules will be skipped")
+            self._default_branch_protection_rule_config = None
+
+        try:
+            # load the default repo ruleset config
+            repo_ruleset_snippet = f"(import '{template_file}').{self.create_repo_ruleset}('default')"
+            self._default_repo_ruleset_config = jsonnet_evaluate_snippet(repo_ruleset_snippet)
+        except RuntimeError:
+            print_warn("no default repo ruleset config found, rulesets will be skipped")
+            self._default_repo_ruleset_config = None
 
         try:
             # load the default environment config
             environment_snippet = f"(import '{template_file}').{self.create_environment}('default')"
             self._default_environment_config = jsonnet_evaluate_snippet(environment_snippet)
-        except RuntimeError as ex:
-            raise RuntimeError(f"failed to get default environment config: {ex}")
+        except RuntimeError:
+            print_warn("no default environment config found, environments will be skipped")
+            self._default_environment_config = None
 
         self._initialized = True
 
@@ -165,8 +177,12 @@ class JsonnetConfig:
         return self._default_repo_secret_config
 
     @property
-    def default_branch_config(self):
-        return self._default_branch_config
+    def default_branch_protection_rule_config(self):
+        return self._default_branch_protection_rule_config
+
+    @property
+    def default_repo_ruleset_config(self):
+        return self._default_repo_ruleset_config
 
     @property
     def default_environment_config(self):
diff --git a/otterdog/models/github_organization.py b/otterdog/models/github_organization.py
index 5b55138b3abff9fbe44177601a1ae43f3b79fbe1..fd6acb23b1a6f29611b71cf968aabb7d79725d06 100644
--- a/otterdog/models/github_organization.py
+++ b/otterdog/models/github_organization.py
@@ -40,6 +40,7 @@ from .organization_settings import OrganizationSettings
 from .organization_workflow_settings import OrganizationWorkflowSettings
 from .organization_webhook import OrganizationWebhook
 from .repository import Repository
+from .repo_ruleset import RepositoryRuleset
 from .repo_secret import RepositorySecret
 from .repo_webhook import RepositoryWebhook
 from .repo_workflow_settings import RepositoryWorkflowSettings
@@ -331,39 +332,55 @@ class GitHubOrganization:
 
         org = cls(github_id, settings)
 
-        start = datetime.now()
         if printer is not None and is_info_enabled():
+            start = datetime.now()
             printer.println("\nwebhooks: Reading...")
 
-        github_webhooks = client.get_org_webhooks(github_id)
+        if jsonnet_config.default_org_webhook_config is not None:
+            github_webhooks = client.get_org_webhooks(github_id)
 
-        if printer is not None and is_info_enabled():
-            end = datetime.now()
-            printer.println(f"webhooks: Read complete after {(end - start).total_seconds()}s")
+            if printer is not None and is_info_enabled():
+                end = datetime.now()
+                printer.println(f"webhooks: Read complete after {(end - start).total_seconds()}s")
 
-        for webhook in github_webhooks:
-            org.add_webhook(OrganizationWebhook.from_provider_data(github_id, webhook))
+            for webhook in github_webhooks:
+                org.add_webhook(OrganizationWebhook.from_provider_data(github_id, webhook))
+        else:
+            print_debug("not reading org webhooks, no default config available")
 
-        start = datetime.now()
-        if printer is not None and is_info_enabled():
-            printer.println("\nsecrets: Reading...")
+        if jsonnet_config.default_org_secret_config is not None:
+            start = datetime.now()
+            if printer is not None and is_info_enabled():
+                printer.println("\nsecrets: Reading...")
 
-        github_secrets = client.get_org_secrets(github_id)
+            github_secrets = client.get_org_secrets(github_id)
 
-        if printer is not None and is_info_enabled():
-            end = datetime.now()
-            printer.println(f"secrets: Read complete after {(end - start).total_seconds()}s")
+            if printer is not None and is_info_enabled():
+                end = datetime.now()
+                printer.println(f"secrets: Read complete after {(end - start).total_seconds()}s")
 
-        for secret in github_secrets:
-            org.add_secret(OrganizationSecret.from_provider_data(github_id, secret))
+            for secret in github_secrets:
+                org.add_secret(OrganizationSecret.from_provider_data(github_id, secret))
+        else:
+            print_debug("not reading org secrets, no default config available")
 
-        for repo in _load_repos_from_provider(github_id, client, printer):
-            org.add_repository(repo)
+        if jsonnet_config.default_repo_config is not None:
+            for repo in _load_repos_from_provider(github_id, client, jsonnet_config, printer):
+                org.add_repository(repo)
+        else:
+            print_debug("not reading repos, no default config available")
 
         return org
 
 
-async def _process_single_repo(gh_client: GitHubProvider, github_id: str, repo_name: str) -> tuple[str, Repository]:
+async def _process_single_repo(
+    gh_client: GitHubProvider,
+    github_id: str,
+    repo_name: str,
+    jsonnet_config: JsonnetConfig,
+    teams: dict[str, Any],
+    app_installations: dict[str, str],
+) -> tuple[str, Repository]:
     rest_api = gh_client.rest_api
 
     # get repo data
@@ -373,25 +390,66 @@ async def _process_single_repo(gh_client: GitHubProvider, github_id: str, repo_n
     github_repo_workflow_data = await rest_api.repo.async_get_workflow_settings(github_id, repo_name)
     repo.workflows = RepositoryWorkflowSettings.from_provider_data(github_id, github_repo_workflow_data)
 
-    # get branch protection rules of the repo
-    rules = await gh_client.get_branch_protection_rules(github_id, repo_name)
-    for github_rule in rules:
-        repo.add_branch_protection_rule(BranchProtectionRule.from_provider_data(github_id, github_rule))
-
-    # get webhooks of the repo
-    webhooks = await rest_api.repo.async_get_webhooks(github_id, repo_name)
-    for github_webhook in webhooks:
-        repo.add_webhook(RepositoryWebhook.from_provider_data(github_id, github_webhook))
-
-    # get secrets of the repo
-    secrets = await rest_api.repo.async_get_secrets(github_id, repo_name)
-    for github_secret in secrets:
-        repo.add_secret(RepositorySecret.from_provider_data(github_id, github_secret))
-
-    # get environments of the repo
-    environments = await rest_api.repo.async_get_environments(github_id, repo_name)
-    for github_environment in environments:
-        repo.add_environment(Environment.from_provider_data(github_id, github_environment))
+    if jsonnet_config.default_branch_protection_rule_config is not None:
+        # get branch protection rules of the repo
+        rules = await gh_client.get_branch_protection_rules(github_id, repo_name)
+        for github_rule in rules:
+            repo.add_branch_protection_rule(BranchProtectionRule.from_provider_data(github_id, github_rule))
+    else:
+        print_debug("not reading branch protection rules, no default config available")
+
+    if jsonnet_config.default_repo_ruleset_config is not None:
+        # get rulesets of the repo
+        rulesets = await rest_api.repo.async_get_rulesets(github_id, repo_name)
+        for github_ruleset in rulesets:
+            # FIXME: need to associate an app id to its slug
+            #        GitHub does not support that atm, so we lookup the currently installed
+            #        apps for an organization which provide a mapping from id to slug.
+            for actor in github_ruleset.get("bypass_actors", []):
+                if actor.get("actor_type", None) == "Integration":
+                    actor_id = str(actor.get("actor_id", 0))
+                    if actor_id in app_installations:
+                        actor["app_slug"] = app_installations[actor_id]
+                elif actor.get("actor_type", None) == "Team":
+                    actor_id = str(actor.get("actor_id", 0))
+                    if actor_id in teams:
+                        actor["team_slug"] = teams[actor_id]
+
+            for rule in github_ruleset.get("rules", []):
+                if rule.get("type", None) == "required_status_checks":
+                    required_status_checks = rule.get("parameters", {}).get("required_status_checks", [])
+                    for status_check in required_status_checks:
+                        integration_id = str(status_check.get("integration_id", 0))
+                        if integration_id in app_installations:
+                            status_check["app_slug"] = app_installations[integration_id]
+
+            repo.add_ruleset(RepositoryRuleset.from_provider_data(github_id, github_ruleset))
+    else:
+        print_debug("not reading repo rulesets, no default config available")
+
+    if jsonnet_config.default_repo_webhook_config is not None:
+        # get webhooks of the repo
+        webhooks = await rest_api.repo.async_get_webhooks(github_id, repo_name)
+        for github_webhook in webhooks:
+            repo.add_webhook(RepositoryWebhook.from_provider_data(github_id, github_webhook))
+    else:
+        print_debug("not reading repo webhooks, no default config available")
+
+    if jsonnet_config.default_repo_secret_config is not None:
+        # get secrets of the repo
+        secrets = await rest_api.repo.async_get_secrets(github_id, repo_name)
+        for github_secret in secrets:
+            repo.add_secret(RepositorySecret.from_provider_data(github_id, github_secret))
+    else:
+        print_debug("not reading repo secrets, no default config available")
+
+    if jsonnet_config.default_environment_config is not None:
+        # get environments of the repo
+        environments = await rest_api.repo.async_get_environments(github_id, repo_name)
+        for github_environment in environments:
+            repo.add_environment(Environment.from_provider_data(github_id, github_environment))
+    else:
+        print_debug("not reading environments, no default config available")
 
     if is_debug_enabled():
         print_debug(f"done retrieving data for repo '{repo_name}'")
@@ -400,7 +458,7 @@ async def _process_single_repo(gh_client: GitHubProvider, github_id: str, repo_n
 
 
 def _load_repos_from_provider(
-    github_id: str, client: GitHubProvider, printer: Optional[IndentingPrinter] = None
+    github_id: str, client: GitHubProvider, jsonnet_config: JsonnetConfig, printer: Optional[IndentingPrinter] = None
 ) -> list[Repository]:
     start = datetime.now()
     if printer is not None and is_info_enabled():
@@ -408,8 +466,20 @@ def _load_repos_from_provider(
 
     repo_names = client.get_repos(github_id)
 
+    teams = {str(team["id"]): f"{github_id}/{team['slug']}" for team in client.rest_api.org.get_teams(github_id)}
+
+    app_installations = {
+        str(installation["app_id"]): installation["app_slug"]
+        for installation in client.rest_api.org.get_app_installations(github_id)
+    }
+
     async def gather():
-        return await asyncio.gather(*[_process_single_repo(client, github_id, repo_name) for repo_name in repo_names])
+        return await asyncio.gather(
+            *[
+                _process_single_repo(client, github_id, repo_name, jsonnet_config, teams, app_installations)
+                for repo_name in repo_names
+            ]
+        )
 
     result = asyncio.run(gather())
 
diff --git a/otterdog/models/repo_ruleset.py b/otterdog/models/repo_ruleset.py
new file mode 100644
index 0000000000000000000000000000000000000000..1dbdf361b6067fc8e1b9d4b46bb1cfb30753197c
--- /dev/null
+++ b/otterdog/models/repo_ruleset.py
@@ -0,0 +1,70 @@
+# *******************************************************************************
+# Copyright (c) 2023 Eclipse Foundation and others.
+# This program and the accompanying materials are made available
+# under the terms of the MIT License
+# which is available at https://spdx.org/licenses/MIT.html
+# SPDX-License-Identifier: MIT
+# *******************************************************************************
+
+from __future__ import annotations
+
+import dataclasses
+
+from otterdog.jsonnet import JsonnetConfig
+from otterdog.models import ModelObject, LivePatch, LivePatchType
+from otterdog.models.ruleset import Ruleset
+from otterdog.providers.github import GitHubProvider
+from otterdog.utils import IndentingPrinter, write_patch_object_as_json
+
+
+@dataclasses.dataclass
+class RepositoryRuleset(Ruleset):
+    """
+    Represents a ruleset defined on repo level.
+    """
+
+    @property
+    def model_object_name(self) -> str:
+        return "repo_ruleset"
+
+    def to_jsonnet(
+        self,
+        printer: IndentingPrinter,
+        jsonnet_config: JsonnetConfig,
+        extend: bool,
+        default_object: ModelObject,
+    ) -> None:
+        patch = self.get_patch_to(default_object)
+        patch.pop("name")
+        printer.print(f"orgs.{jsonnet_config.create_repo_ruleset}('{self.name}')")
+        write_patch_object_as_json(patch, printer)
+
+    @classmethod
+    def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None:
+        from .repository import Repository
+
+        match patch.patch_type:
+            case LivePatchType.ADD:
+                assert isinstance(patch.expected_object, RepositoryRuleset)
+                assert isinstance(patch.parent_object, Repository)
+                provider.add_repo_ruleset(
+                    org_id, patch.parent_object.name, patch.expected_object.to_provider_data(org_id, provider)
+                )
+
+            case LivePatchType.REMOVE:
+                assert isinstance(patch.current_object, RepositoryRuleset)
+                assert isinstance(patch.parent_object, Repository)
+                provider.delete_repo_ruleset(
+                    org_id, patch.parent_object.name, patch.current_object.id, patch.current_object.name
+                )
+
+            case LivePatchType.CHANGE:
+                assert isinstance(patch.expected_object, RepositoryRuleset)
+                assert isinstance(patch.current_object, RepositoryRuleset)
+                assert isinstance(patch.parent_object, Repository)
+                provider.update_repo_ruleset(
+                    org_id,
+                    patch.parent_object.name,
+                    patch.current_object.id,
+                    patch.expected_object.to_provider_data(org_id, provider),
+                )
diff --git a/otterdog/models/repository.py b/otterdog/models/repository.py
index 55de0c7759df8f9a03a2143ebbd1a4671fdd0eb2..f1ecdc8bfef44d8b80581a987f0934259789b4a3 100644
--- a/otterdog/models/repository.py
+++ b/otterdog/models/repository.py
@@ -38,6 +38,7 @@ from otterdog.utils import (
 
 from .branch_protection_rule import BranchProtectionRule
 from .environment import Environment
+from .repo_ruleset import RepositoryRuleset
 from .repo_secret import RepositorySecret
 from .repo_webhook import RepositoryWebhook
 from .repo_workflow_settings import RepositoryWorkflowSettings
@@ -98,6 +99,7 @@ class Repository(ModelObject):
     branch_protection_rules: list[BranchProtectionRule] = dataclasses.field(
         metadata={"nested_model": True}, default_factory=list
     )
+    rulesets: list[RepositoryRuleset] = dataclasses.field(metadata={"nested_model": True}, default_factory=list)
     environments: list[Environment] = dataclasses.field(metadata={"nested_model": True}, default_factory=list)
 
     _security_properties: ClassVar[list[str]] = [
@@ -146,6 +148,12 @@ class Repository(ModelObject):
     def set_branch_protection_rules(self, rules: list[BranchProtectionRule]) -> None:
         self.branch_protection_rules = rules
 
+    def add_ruleset(self, rule: RepositoryRuleset) -> None:
+        self.rulesets.append(rule)
+
+    def set_rulesets(self, rules: list[RepositoryRuleset]) -> None:
+        self.rulesets = rules
+
     def add_webhook(self, webhook: RepositoryWebhook) -> None:
         self.webhooks.append(webhook)
 
@@ -344,6 +352,9 @@ class Repository(ModelObject):
         for bpr in self.branch_protection_rules:
             bpr.validate(context, self)
 
+        for rule in self.rulesets:
+            rule.validate(context, self)
+
         for env in self.environments:
             env.validate(context, self)
 
@@ -372,9 +383,13 @@ class Repository(ModelObject):
             yield secret, self
             yield from secret.get_model_objects()
 
-        for rule in self.branch_protection_rules:
-            yield rule, self
-            yield from rule.get_model_objects()
+        for bpr in self.branch_protection_rules:
+            yield bpr, self
+            yield from bpr.get_model_objects()
+
+        for ruleset in self.rulesets:
+            yield ruleset, self
+            yield from ruleset.get_model_objects()
 
         for env in self.environments:
             yield env, self
@@ -393,6 +408,7 @@ class Repository(ModelObject):
                 "secrets": OptionalS("secrets", default=[]) >> Forall(lambda x: RepositorySecret.from_model_data(x)),
                 "branch_protection_rules": OptionalS("branch_protection_rules", default=[])
                 >> Forall(lambda x: BranchProtectionRule.from_model_data(x)),
+                "rulesets": OptionalS("rulesets", default=[]) >> Forall(lambda x: RepositoryRuleset.from_model_data(x)),
                 "environments": OptionalS("environments", default=[])
                 >> Forall(lambda x: Environment.from_model_data(x)),
                 "workflows": If(
@@ -436,6 +452,7 @@ class Repository(ModelObject):
                 "webhooks": K([]),
                 "secrets": K([]),
                 "branch_protection_rules": K([]),
+                "rulesets": K([]),
                 "environments": K([]),
                 "secret_scanning": OptionalS("security_and_analysis", "secret_scanning", "status", default=UNSET),
                 "secret_scanning_push_protection": OptionalS(
@@ -539,6 +556,7 @@ class Repository(ModelObject):
         has_webhooks = len(self.webhooks) > 0
         has_secrets = len(self.secrets) > 0
         has_branch_protection_rules = len(self.branch_protection_rules) > 0
+        has_rulesets = len(self.rulesets) > 0
         has_environments = len(self.environments) > 0
 
         # FIXME: take webhooks, branch protection rules and environments into account once
@@ -593,7 +611,9 @@ class Repository(ModelObject):
         # FIXME: support overriding branch protection rules for repos coming from
         #        the default configuration.
         if has_branch_protection_rules and not extend:
-            default_org_rule = BranchProtectionRule.from_model_data(jsonnet_config.default_branch_config)
+            default_org_rule = BranchProtectionRule.from_model_data(
+                jsonnet_config.default_branch_protection_rule_config
+            )
 
             printer.println("branch_protection_rules: [")
             printer.level_up()
@@ -604,6 +624,19 @@ class Repository(ModelObject):
             printer.level_down()
             printer.println("],")
 
+        # FIXME: support overriding rulesets for repos coming from the default configuration.
+        if has_rulesets and not extend:
+            default_org_rule = RepositoryRuleset.from_model_data(jsonnet_config.default_repo_ruleset_config)
+
+            printer.println("rulesets: [")
+            printer.level_up()
+
+            for ruleset in self.rulesets:
+                ruleset.to_jsonnet(printer, jsonnet_config, False, default_org_rule)
+
+            printer.level_down()
+            printer.println("],")
+
         # FIXME: support overriding environments for repos coming from
         #        the default configuration.
         if has_environments and not extend:
@@ -722,6 +755,14 @@ class Repository(ModelObject):
                 handler,
             )
 
+            RepositoryRuleset.generate_live_patch_of_list(
+                expected_object.rulesets,
+                current_object.rulesets if current_object is not None else [],
+                parent_repo,
+                context,
+                handler,
+            )
+
     @classmethod
     def generate_live_patch_of_list(
         cls,
diff --git a/otterdog/models/ruleset.py b/otterdog/models/ruleset.py
new file mode 100644
index 0000000000000000000000000000000000000000..aef51bf7da79b4cbe78c4ecaf16177a44513c805
--- /dev/null
+++ b/otterdog/models/ruleset.py
@@ -0,0 +1,570 @@
+# *******************************************************************************
+# Copyright (c) 2023 Eclipse Foundation and others.
+# This program and the accompanying materials are made available
+# under the terms of the MIT License
+# which is available at https://spdx.org/licenses/MIT.html
+# SPDX-License-Identifier: MIT
+# *******************************************************************************
+
+from __future__ import annotations
+
+import abc
+import dataclasses
+import re
+from typing import Any, TypeVar, Optional, cast, ClassVar
+
+from jsonbender import bend, S, OptionalS, K  # type: ignore
+
+from otterdog.models import ModelObject, ValidationContext, LivePatchContext, LivePatchHandler, FailureType, LivePatch
+from otterdog.providers.github import GitHubProvider
+from otterdog.utils import UNSET, is_unset, associate_by_key, is_set_and_valid, print_warn, Change
+
+RS = TypeVar("RS", bound="Ruleset")
+
+
+@dataclasses.dataclass
+class Ruleset(ModelObject, abc.ABC):
+    """
+    Represents a Ruleset.
+    """
+
+    id: int = dataclasses.field(metadata={"external_only": True})
+    name: str = dataclasses.field(metadata={"key": True})
+    node_id: str = dataclasses.field(metadata={"external_only": True})
+    enforcement: str
+
+    bypass_actors: list[str]
+
+    include_refs: list[str]
+    exclude_refs: list[str]
+
+    allows_creations: bool
+    allows_deletions: bool
+    allows_updates: bool
+    allows_force_pushes: bool
+
+    requires_commit_signatures: bool
+    requires_linear_history: bool
+
+    requires_pull_request: bool
+    # the following settings are only taken into account
+    # when requires_pull_request is True
+    required_approving_review_count: Optional[int]
+    dismisses_stale_reviews: bool
+    requires_code_owner_review: bool
+    requires_last_push_approval: bool
+    requires_review_thread_resolution: bool
+
+    requires_status_checks: bool
+    requires_strict_status_checks: bool
+    required_status_checks: list[str]
+
+    requires_deployments: bool
+    required_deployment_environments: list[str]
+
+    _roles: ClassVar[dict[str, str]] = {"5": "RepositoryAdmin", "4": "Write", "2": "Maintain", "1": "OrganizationAdmin"}
+    _inverted_roles: ClassVar[dict[str, str]] = dict((v, k) for k, v in _roles.items())
+
+    def validate(self, context: ValidationContext, parent_object: Any) -> None:
+        from .github_organization import GitHubOrganization
+
+        org_settings = cast(GitHubOrganization, context.root_object).settings
+
+        free_plan = org_settings.plan == "free"
+
+        if is_set_and_valid(self.enforcement):
+            if self.enforcement not in {"active", "disabled", "evaluate"}:
+                context.add_failure(
+                    FailureType.ERROR,
+                    f"{self.get_model_header(parent_object)} has 'enforcement' of value "
+                    f"'{self.enforcement}', "
+                    f"only values ('active' | 'disabled' | 'evaluate') are allowed.",
+                )
+
+            if free_plan is True and self.enforcement == "evaluate":
+                context.add_failure(
+                    FailureType.ERROR,
+                    f"{self.get_model_header(parent_object)} has 'enforcement' of value "
+                    f"'{self.enforcement}' which is not available for a 'free' plan.",
+                )
+
+        def valid_condition_pattern(pattern: str) -> bool:
+            return pattern.startswith("refs/heads/") or pattern == "~DEFAULT_BRANCH" or pattern == "~ALL"
+
+        if is_set_and_valid(self.include_refs):
+            for ref in self.include_refs:
+                if not valid_condition_pattern(ref):
+                    context.add_failure(
+                        FailureType.ERROR,
+                        f"{self.get_model_header(parent_object)} has an invalid 'include_refs' pattern "
+                        f"'{ref}', only values ('refs/heads/*', '~DEFAULT_BRANCH', '~ALL') are allowed",
+                    )
+
+        if is_set_and_valid(self.exclude_refs):
+            for ref in self.exclude_refs:
+                if not valid_condition_pattern(ref):
+                    context.add_failure(
+                        FailureType.ERROR,
+                        f"{self.get_model_header(parent_object)} has an invalid 'exclude_refs' pattern "
+                        f"'{ref}', only values ('refs/heads/*', '~DEFAULT_BRANCH', '~ALL') are allowed",
+                    )
+
+        if self.requires_pull_request is False:
+            if is_set_and_valid(self.required_approving_review_count):
+                context.add_failure(
+                    FailureType.WARNING,
+                    f"{self.get_model_header(parent_object)} has"
+                    f" 'requires_pull_request' disabled but 'required_approving_review_count' "
+                    f"is set to '{self.required_approving_review_count}', setting will be ignored.",
+                )
+
+            for key in {
+                "dismisses_stale_reviews",
+                "requires_code_owner_review",
+                "requires_last_push_approval",
+                "requires_review_thread_resolution",
+            }:
+                if self.__getattribute__(key) is True:
+                    context.add_failure(
+                        FailureType.WARNING,
+                        f"{self.get_model_header(parent_object)} has"
+                        f" 'requires_pull_request' disabled but '{key}' "
+                        f"is enabled, setting will be ignored.",
+                    )
+
+        # required_approving_review_count must be defined when requires_pull_request is enabled
+        required_approving_review_count = self.required_approving_review_count
+        if self.requires_pull_request is True and not is_unset(required_approving_review_count):
+            if required_approving_review_count is None or required_approving_review_count < 0:
+                context.add_failure(
+                    FailureType.ERROR,
+                    f"{self.get_model_header(parent_object)} has"
+                    f" 'requires_pull_request' enabled but 'required_approving_review_count' "
+                    f"is not set (must be set to a non negative number).",
+                )
+
+        # if 'requires_status_checks' is disabled, issue a warning if required_status_checks is non-empty.
+        required_status_checks = self.required_status_checks
+        if (
+            self.requires_status_checks is False
+            and is_set_and_valid(required_status_checks)
+            and len(required_status_checks) > 0
+        ):
+            context.add_failure(
+                FailureType.INFO,
+                f"{self.get_model_header(parent_object)} has"
+                f" 'requires_status_checks' disabled but "
+                f"'required_status_checks' is set to '{self.required_status_checks}', "
+                f"setting will be ignored.",
+            )
+
+        # if 'requires_deployments' is disabled, issue a warning if required_deployment_environments is non-empty.
+        if (
+            self.requires_deployments is False
+            and is_set_and_valid(self.required_deployment_environments)
+            and len(self.required_deployment_environments) > 0
+        ):
+            context.add_failure(
+                FailureType.WARNING,
+                f"{self.get_model_header(parent_object)} has "
+                f"'requires_deployments' disabled but "
+                f"'required_deployment_environments' is set to "
+                f"'{self.required_deployment_environments}', setting will be ignored.",
+            )
+
+        if self.requires_deployments is True and len(self.required_deployment_environments) > 0:
+            from .repository import Repository
+
+            environments = cast(Repository, parent_object).environments
+
+            environments_by_name = associate_by_key(environments, lambda x: x.name)
+            for env_name in self.required_deployment_environments:
+                if env_name not in environments_by_name:
+                    context.add_failure(
+                        FailureType.ERROR,
+                        f"{self.get_model_header(parent_object)} requires deployment environment "
+                        f"'{env_name}' which is not defined in the repository itself.",
+                    )
+
+    def include_field_for_diff_computation(self, field: dataclasses.Field) -> bool:
+        # disable diff computation for dependent fields of requires_pull_request,
+        if self.requires_pull_request is False:
+            if field.name in [
+                "required_approving_review_count",
+                "dismisses_stale_reviews",
+                "requires_code_owner_review",
+                "requires_last_push_approval",
+                "requires_review_thread_resolution",
+            ]:
+                return False
+
+        if self.requires_status_checks is False:
+            if field.name in [
+                "required_status_checks",
+                "requires_strict_status_checks",
+            ]:
+                return False
+
+        if self.requires_deployments is False:
+            if field.name in ["required_deployment_environments"]:
+                return False
+
+        return True
+
+    def include_field_for_patch_computation(self, field: dataclasses.Field) -> bool:
+        return True
+
+    @classmethod
+    def from_model_data(cls, data: dict[str, Any]):
+        mapping = {k: OptionalS(k, default=UNSET) for k in map(lambda x: x.name, cls.all_fields())}
+        return cls(**bend(mapping, data))
+
+    @classmethod
+    def from_provider_data(cls, org_id: str, data: dict[str, Any]):
+        mapping = cls.get_mapping_from_provider(org_id, data)
+        return cls(**bend(mapping, data))
+
+    @classmethod
+    def get_mapping_from_provider(cls, org_id: str, data: dict[str, Any]) -> dict[str, Any]:
+        mapping = {k: OptionalS(k, default=UNSET) for k in map(lambda x: x.name, cls.all_fields())}
+
+        mapping.update(
+            {
+                "include_refs": OptionalS("conditions", "ref_name", "include", default=[]),
+                "exclude_refs": OptionalS("conditions", "ref_name", "exclude", default=[]),
+            }
+        )
+
+        rules = data.get("rules", [])
+
+        def check_simple_rule(prop_key: str, rule_type: str, value_if_rule_is_present: bool) -> None:
+            if any((_ := rule) for rule in rules if rule["type"] == rule_type):
+                mapping[prop_key] = K(value_if_rule_is_present)
+            else:
+                mapping[prop_key] = K(not value_if_rule_is_present)
+
+        check_simple_rule("allows_deletions", "deletion", False)
+        check_simple_rule("allows_creations", "creation", False)
+        check_simple_rule("allows_updates", "update", False)
+        check_simple_rule("allows_force_pushes", "non_fast_forward", False)
+
+        check_simple_rule("requires_commit_signatures", "required_signatures", True)
+        check_simple_rule("requires_linear_history", "required_linear_history", True)
+
+        # bypass actors
+        if "bypass_actors" in data:
+            bypass_actors = data["bypass_actors"]
+            transformed_actors = []
+            for actor in bypass_actors:
+                actor_type = actor["actor_type"]
+
+                if actor_type == "RepositoryRole" or actor_type == "OrganizationAdmin":
+                    role = cls._roles.get(str(actor['actor_id']), None)
+                    if role is None:
+                        print_warn(f"fail to map repository role '{actor['actor_id']}', skipping")
+                        continue
+                    transformed_actor = f"#{role}"
+                elif actor_type == "Team":
+                    transformed_actor = f"@{actor['team_slug']}"
+                elif actor_type == "Integration":
+                    transformed_actor = actor["app_slug"]
+                else:
+                    continue
+
+                bypass_mode = actor.get("bypass_mode", "always")
+                if bypass_mode != "always":
+                    transformed_actor = f"{transformed_actor}:{bypass_mode}"
+
+                transformed_actors.append(transformed_actor)
+
+            mapping["bypass_actors"] = K(transformed_actors)
+
+        else:
+            mapping["bypass_actors"] = K([])
+
+        # requires pull request
+        if any((found := rule) for rule in rules if rule["type"] == "pull_request"):
+            mapping["requires_pull_request"] = K(True)
+            parameters = found.get("parameters", {})
+
+            mapping["required_approving_review_count"] = K(parameters.get("required_approving_review_count", None))
+            mapping["dismisses_stale_reviews"] = K(parameters.get("dismiss_stale_reviews_on_push", UNSET))
+            mapping["requires_code_owner_review"] = K(parameters.get("require_code_owner_review", UNSET))
+            mapping["requires_last_push_approval"] = K(parameters.get("require_last_push_approval", UNSET))
+            mapping["requires_review_thread_resolution"] = K(parameters.get("required_review_thread_resolution", UNSET))
+        else:
+            mapping["requires_pull_request"] = K(False)
+            mapping["required_approving_review_count"] = K(None)
+            mapping["dismisses_stale_reviews"] = K(UNSET)
+            mapping["requires_code_owner_review"] = K(UNSET)
+            mapping["requires_last_push_approval"] = K(UNSET)
+            mapping["requires_review_thread_resolution"] = K(UNSET)
+
+        # required status checks
+        if any((found := rule) for rule in rules if rule["type"] == "required_status_checks"):
+            mapping["requires_status_checks"] = K(True)
+            parameters = found.get("parameters", {})
+
+            mapping["requires_strict_status_checks"] = K(parameters.get("strict_required_status_checks_policy", UNSET))
+
+            raw_status_checks = parameters.get("required_status_checks", [])
+            status_checks = []
+            for status_check in raw_status_checks:
+                if "app_slug" in status_check:
+                    check = status_check["app_slug"] + ":" + status_check["context"]
+                else:
+                    check = status_check["context"]
+
+                status_checks.append(check)
+
+            mapping["required_status_checks"] = K(status_checks)
+        else:
+            mapping["requires_status_checks"] = K(False)
+            mapping["requires_strict_status_checks"] = K(UNSET)
+            mapping["required_status_checks"] = K([])
+
+        # required deployments
+        if any((found := rule) for rule in rules if rule["type"] == "required_deployments"):
+            mapping["requires_deployments"] = K(True)
+            deployment_environments = found.get("parameters", {}).get("required_deployment_environments", [])
+            mapping["required_deployment_environments"] = K(deployment_environments)
+        else:
+            mapping["requires_deployments"] = K(False)
+            mapping["required_deployment_environments"] = K([])
+
+        return mapping
+
+    @classmethod
+    def get_mapping_to_provider(cls, org_id: str, data: dict[str, Any], provider: GitHubProvider) -> dict[str, Any]:
+        mapping = {
+            field.name: S(field.name) for field in cls.provider_fields() if not is_unset(data.get(field.name, UNSET))
+        }
+
+        def pop_mapping(keys: list[str]) -> None:
+            for key_to_pop in keys:
+                if key_to_pop in mapping:
+                    mapping.pop(key_to_pop)
+
+        # include / excludes
+        ref_names = {}
+        if "include_refs" in data:
+            mapping.pop("include_refs")
+            ref_names["include"] = K(data["include_refs"])
+
+        if "exclude_refs" in data:
+            mapping.pop("exclude_refs")
+            ref_names["exclude"] = K(data["exclude_refs"])
+
+        if len(ref_names) > 0:
+            mapping["conditions"] = {"ref_name": ref_names}
+
+        # bypass actors
+        if "bypass_actors" in data:
+            bypass_actors: list[str] = data["bypass_actors"]
+            transformed_actors = []
+
+            def extract_actor_and_bypass_mode(encoded_data: str) -> tuple[str, str]:
+                if ":" in encoded_data:
+                    a, m = re.split(":", encoded_data, 1)
+                else:
+                    a = encoded_data
+                    m = "always"
+
+                return a, m
+
+            for actor in bypass_actors:
+                if actor.startswith("#"):
+                    role, bypass_mode = extract_actor_and_bypass_mode(actor[1:])
+                    actor_type = "RepositoryRole"
+                    actor_id = cls._inverted_roles[role]
+                    if actor_id == "1":
+                        actor_type = "OrganizationAdmin"
+                elif actor.startswith("@"):
+                    team, bypass_mode = extract_actor_and_bypass_mode(actor[1:])
+                    actor_type = "Team"
+                    actor_id = provider.rest_api.org.get_team_ids(team)[0]
+                else:
+                    app, bypass_mode = extract_actor_and_bypass_mode(actor)
+                    actor_type = "Integration"
+                    actor_id = provider.rest_api.app.get_app_ids(app)[0]
+
+                transformed_actors.append(
+                    {"actor_id": K(int(actor_id)), "actor_type": K(actor_type), "bypass_mode": K(bypass_mode)}
+                )
+
+            mapping["bypass_actors"] = transformed_actors
+
+        # rules
+        rules: list[Any] = []
+
+        def add_simple_rule(prop_key: str, rule_type: str, inverted: bool) -> None:
+            if prop_key in data:
+                mapping.pop(prop_key)
+                prop_value = data[prop_key]
+
+                if (inverted and not prop_value) or (not inverted and prop_value):
+                    rules.append({"type": K(rule_type)})
+
+        add_simple_rule("allows_deletions", "deletion", True)
+        add_simple_rule("allows_creations", "creation", True)
+        add_simple_rule("allows_updates", "update", True)
+        add_simple_rule("allows_force_pushes", "non_fast_forward", True)
+
+        add_simple_rule("requires_commit_signatures", "required_signatures", False)
+        add_simple_rule("requires_linear_history", "required_linear_history", False)
+
+        def add_parameter(prop_key: str, param_key: str, params: dict[str, Any]):
+            if prop_key in data:
+                params[param_key] = S(prop_key)
+
+        # requires pull request
+        if "requires_pull_request" in data:
+            value = data["requires_pull_request"]
+
+            if value is True:
+                rule = {"type": K("pull_request")}
+                pull_parameters: dict[str, Any] = {}
+
+                add_parameter("required_approving_review_count", "required_approving_review_count", pull_parameters)
+                add_parameter("dismisses_stale_reviews", "dismiss_stale_reviews_on_push", pull_parameters)
+                add_parameter("requires_code_owner_review", "require_code_owner_review", pull_parameters)
+                add_parameter("requires_last_push_approval", "require_last_push_approval", pull_parameters)
+                add_parameter("requires_review_thread_resolution", "required_review_thread_resolution", pull_parameters)
+
+                rule["parameters"] = pull_parameters
+                rules.append(rule)
+
+        pop_mapping(
+            [
+                "requires_pull_request",
+                "required_approving_review_count",
+                "dismisses_stale_reviews",
+                "requires_code_owner_review",
+                "requires_last_push_approval",
+                "requires_last_push_approval",
+                "requires_review_thread_resolution",
+            ]
+        )
+
+        # required status checks
+        if "requires_status_checks" in data:
+            value = data["requires_status_checks"]
+            if value is True:
+                rule = {"type": K("required_status_checks")}
+                status_checks_parameters: dict[str, Any] = {}
+                add_parameter(
+                    "requires_strict_status_checks", "strict_required_status_checks_policy", status_checks_parameters
+                )
+
+                if "required_status_checks" in data:
+                    required_status_checks = data["required_status_checks"]
+                    app_slugs = set()
+
+                    for check in required_status_checks:
+                        if ":" in check:
+                            app_slug, context = re.split(":", check, 1)
+                            app_slugs.add(app_slug)
+
+                    app_ids = provider.get_app_ids(app_slugs)
+
+                    transformed_checks = []
+                    for check in required_status_checks:
+                        if ":" in check:
+                            app_slug, context = re.split(":", check, 1)
+                        else:
+                            app_slug = None
+                            context = check
+
+                        if app_slug is None:
+                            transformed_checks.append({"context": context})
+                        else:
+                            transformed_checks.append({"integration_id": app_ids[app_slug], "context": context})
+
+                    status_checks_parameters["required_status_checks"] = K(transformed_checks)
+
+                rule["parameters"] = status_checks_parameters
+                rules.append(rule)
+
+        pop_mapping(["requires_status_checks", "requires_strict_status_checks", "required_status_checks"])
+
+        # required deployments
+        if "requires_deployments" in data:
+            value = data["requires_deployments"]
+            if value is True:
+                rule = {"type": K("required_deployments")}
+                deployment_parameters: dict[str, Any] = {}
+                add_parameter(
+                    "required_deployment_environments", "required_deployment_environments", deployment_parameters
+                )
+                rule["parameters"] = deployment_parameters
+                rules.append(rule)
+
+        pop_mapping(["requires_deployments", "required_deployment_environments"])
+
+        if len(rules) > 0:
+            mapping["rules"] = rules
+
+        return mapping
+
+    @classmethod
+    def generate_live_patch(
+        cls,
+        expected_object: Optional[ModelObject],
+        current_object: Optional[ModelObject],
+        parent_object: Optional[ModelObject],
+        context: LivePatchContext,
+        handler: LivePatchHandler,
+    ) -> None:
+        if current_object is None:
+            assert isinstance(expected_object, Ruleset)
+            handler(LivePatch.of_addition(expected_object, parent_object, expected_object.apply_live_patch))
+            return
+
+        if expected_object is None:
+            assert isinstance(current_object, Ruleset)
+            handler(LivePatch.of_deletion(current_object, parent_object, current_object.apply_live_patch))
+            return
+
+        assert isinstance(expected_object, Ruleset)
+        assert isinstance(current_object, Ruleset)
+
+        modified_rule: dict[str, Change[Any]] = expected_object.get_difference_from(current_object)
+
+        if len(modified_rule) > 0:
+            handler(
+                LivePatch.of_changes(
+                    expected_object,
+                    current_object,
+                    modified_rule,
+                    parent_object,
+                    False,
+                    expected_object.apply_live_patch,
+                )
+            )
+
+    @classmethod
+    def generate_live_patch_of_list(
+        cls,
+        expected_rulesets: list[RS],
+        current_rulesets: list[RS],
+        parent_object: Optional[ModelObject],
+        context: LivePatchContext,
+        handler: LivePatchHandler,
+    ) -> None:
+        expected_rulesets_by_name = associate_by_key(expected_rulesets, lambda x: x.name)
+
+        for current_ruleset in current_rulesets:
+            ruleset_name = current_ruleset.name
+
+            expected_ruleset = expected_rulesets_by_name.get(ruleset_name)
+            if expected_ruleset is None:
+                cls.generate_live_patch(None, current_ruleset, parent_object, context, handler)
+                continue
+
+            # pop the already handled ruleset
+            expected_rulesets_by_name.pop(expected_ruleset.name)
+
+            cls.generate_live_patch(expected_ruleset, current_ruleset, parent_object, context, handler)
+
+        for ruleset_name, ruleset in expected_rulesets_by_name.items():
+            cls.generate_live_patch(ruleset, None, parent_object, context, handler)
diff --git a/otterdog/providers/github/__init__.py b/otterdog/providers/github/__init__.py
index 1a6e74d81c47559a6ca3a193081d9ebe36084603..fae477a5815c28fc46abb64b4292d4922a787f32 100644
--- a/otterdog/providers/github/__init__.py
+++ b/otterdog/providers/github/__init__.py
@@ -210,6 +210,16 @@ class GitHubProvider:
     def delete_branch_protection_rule(self, org_id: str, repo_name: str, rule_pattern: str, rule_id: str) -> None:
         self.graphql_client.delete_branch_protection_rule(org_id, repo_name, rule_pattern, rule_id)
 
+    def update_repo_ruleset(self, org_id: str, repo_name: str, ruleset_id: int, ruleset: dict[str, Any]) -> None:
+        if len(ruleset) > 0:
+            self.rest_api.repo.update_ruleset(org_id, repo_name, ruleset_id, ruleset)
+
+    def add_repo_ruleset(self, org_id: str, repo_name: str, data: dict[str, Any]) -> None:
+        self.rest_api.repo.add_ruleset(org_id, repo_name, data)
+
+    def delete_repo_ruleset(self, org_id: str, repo_name: str, ruleset_id: int, name: str) -> None:
+        self.rest_api.repo.delete_ruleset(org_id, repo_name, ruleset_id, name)
+
     def get_repo_webhooks(self, org_id: str, repo_name: str) -> list[dict[str, Any]]:
         return self.rest_api.repo.get_webhooks(org_id, repo_name)
 
@@ -310,6 +320,9 @@ class GitHubProvider:
     def get_app_node_ids(self, app_names: set[str]) -> dict[str, str]:
         return {app_name: self.rest_api.app.get_app_ids(app_name)[1] for app_name in app_names}
 
+    def get_app_ids(self, app_names: set[str]) -> dict[str, str]:
+        return {app_name: self.rest_api.app.get_app_ids(app_name)[0] for app_name in app_names}
+
     def get_ref_for_pull_request(self, org_id: str, repo_name: str, pull_number: str) -> str:
         return self.rest_api.repo.get_ref_for_pull_request(org_id, repo_name, pull_number)
 
diff --git a/otterdog/providers/github/rest/org_client.py b/otterdog/providers/github/rest/org_client.py
index c180e83eb729d8a5293bde53b5f8d7ffe6bf101d..b3d5554b139f8938e25c1d93e99f05485c97744f 100644
--- a/otterdog/providers/github/rest/org_client.py
+++ b/otterdog/providers/github/rest/org_client.py
@@ -256,6 +256,15 @@ class OrgClient(RestClient):
             tb = ex.__traceback__
             raise RuntimeError(f"failed retrieving team node id:\n{ex}").with_traceback(tb)
 
+    def get_teams(self, org_id: str) -> list[dict[str, Any]]:
+        print_debug(f"retrieving teams for org '{org_id}'")
+
+        try:
+            return self.requester.request_json("GET", f"/orgs/{org_id}/teams")
+        except GitHubException as ex:
+            tb = ex.__traceback__
+            raise RuntimeError(f"failed retrieving teams for org '{org_id}':\n{ex}").with_traceback(tb)
+
     def get_app_installations(self, org_id: str) -> list[dict[str, Any]]:
         print_debug(f"retrieving app installations for org '{org_id}'")
 
diff --git a/otterdog/providers/github/rest/repo_client.py b/otterdog/providers/github/rest/repo_client.py
index 70e860f39435382cbc9e0e0365a3642aabef3c4c..d29b9d1cdabf116e2679eb9ad20eff4b3e84a7ee 100644
--- a/otterdog/providers/github/rest/repo_client.py
+++ b/otterdog/providers/github/rest/repo_client.py
@@ -278,6 +278,69 @@ class RepoClient(RestClient):
 
         print_debug(f"removed repo webhook with url '{url}'")
 
+    async def async_get_rulesets(self, org_id: str, repo_name: str) -> list[dict[str, Any]]:
+        print_debug(f"async retrieving rulesets for repo '{org_id}/{repo_name}'")
+
+        try:
+            result = []
+            params = {"includes_parents": str(False)}
+            response = await self.requester.async_request_paged_json(
+                "GET", f"/repos/{org_id}/{repo_name}/rulesets", params=params
+            )
+            for ruleset in response:
+                result.append(await self.async_get_ruleset(org_id, repo_name, str(ruleset["id"])))
+            return result
+        except GitHubException as ex:
+            tb = ex.__traceback__
+            raise RuntimeError(f"failed retrieving rulesets for repo '{org_id}/{repo_name}':\n{ex}").with_traceback(tb)
+
+    async def async_get_ruleset(self, org_id: str, repo_name: str, ruleset_id: str) -> dict[str, Any]:
+        print_debug(f"async retrieving ruleset '{ruleset_id}' for repo '{org_id}/{repo_name}'")
+
+        try:
+            params = {"includes_parents": str(False)}
+            return await self.requester.async_request_json(
+                "GET", f"/repos/{org_id}/{repo_name}/rulesets/{ruleset_id}", params=params
+            )
+        except GitHubException as ex:
+            tb = ex.__traceback__
+            raise RuntimeError(f"failed retrieving ruleset '{ruleset_id}' for repo '{org_id}/{repo_name}':\n{ex}").with_traceback(tb)
+
+    def update_ruleset(self, org_id: str, repo_name: str, ruleset_id: int, ruleset: dict[str, Any]) -> None:
+        print_debug(f"updating repo ruleset '{ruleset_id}' for repo '{org_id}/{repo_name}'")
+
+        try:
+            self.requester.request_json("PUT", f"/repos/{org_id}/{repo_name}/rulesets/{ruleset_id}", ruleset)
+            print_debug(f"updated repo ruleset '{ruleset_id}'")
+        except GitHubException as ex:
+            tb = ex.__traceback__
+            raise RuntimeError(f"failed to update repo ruleset {ruleset_id}:\n{ex}").with_traceback(tb)
+
+    def add_ruleset(self, org_id: str, repo_name: str, data: dict[str, Any]) -> None:
+        name = data["name"]
+        print_debug(f"adding repo ruleset with name '{name}' for repo '{org_id}/{repo_name}'")
+
+        # TODO: currently we only support rulesets targeting branches
+        data["target"] = "branch"
+        print_debug(f"ruleset data: {data}")
+
+        try:
+            self.requester.request_json("POST", f"/repos/{org_id}/{repo_name}/rulesets", data)
+            print_debug(f"added repo ruleset with name '{name}'")
+        except GitHubException as ex:
+            tb = ex.__traceback__
+            raise RuntimeError(f"failed to add repo ruleset with name '{name}':\n{ex}").with_traceback(tb)
+
+    def delete_ruleset(self, org_id: str, repo_name: str, ruleset_id: int, name: str) -> None:
+        print_debug(f"deleting repo ruleset with name '{name}' for repo '{org_id}/{repo_name}'")
+
+        response = self.requester.request_raw("DELETE", f"/repos/{org_id}/{repo_name}/rulesets/{ruleset_id}")
+
+        if response.status_code != 204:
+            raise RuntimeError(f"failed to delete repo ruleset with name '{name}'")
+
+        print_debug(f"removed repo ruleset with name '{name}'")
+
     @staticmethod
     def _render_template_content(org_id: str, repo_name: str, content: str) -> str:
         variables = {"org": org_id, "repo": repo_name}
diff --git a/otterdog/providers/github/rest/requester.py b/otterdog/providers/github/rest/requester.py
index ad76f7558c9afb8cb7ea817bc8abfb17a26644df..084fd1db0277aa5856cd999874c1ce37f84b2ae9 100644
--- a/otterdog/providers/github/rest/requester.py
+++ b/otterdog/providers/github/rest/requester.py
@@ -40,6 +40,7 @@ class Requester:
         # enable logging for requests_cache
         # import logging
         #
+        #
         # logging.basicConfig(level='DEBUG')
 
         self._session: CachedSession = CachedSession(
@@ -183,8 +184,9 @@ class Requester:
 
         async with AsyncCachedSession(cache=FileBackend(cache_name=_AIOHTTP_CACHE_DIR, use_temp=False)) as session:
             url = self._build_url(url_path)
-            key = session.cache.create_key(method, url)
+            key = session.cache.create_key(method, url, params=params)
             cached_response = await session.cache.get_response(key)
+
             if cached_response is not None and method == "GET":
                 # if the url is present in the cache, try to refresh it from the server
                 refresh_headers = headers.copy()
@@ -242,7 +244,7 @@ class Requester:
                 if is_debug_enabled():
                     if not response.from_cache:  # type: ignore
                         print_debug(
-                            f"'{method}' {url_path}: rate-linit-used = {response.headers.get('x-ratelimit-used', None)}"
+                            f"'{method}' {url_path}: rate-limit-used = {response.headers.get('x-ratelimit-used', None)}"
                         )
 
                 if is_trace_enabled():
diff --git a/otterdog/resources/schemas/repository-ruleset.json b/otterdog/resources/schemas/repository-ruleset.json
new file mode 100644
index 0000000000000000000000000000000000000000..2eacf864402e54e9a45f12164b5936e84d3f49bf
--- /dev/null
+++ b/otterdog/resources/schemas/repository-ruleset.json
@@ -0,0 +1,64 @@
+{
+  "$id": "repository-ruleset.json",
+
+  "type": "object",
+  "properties": {
+    "name": { "type": "string" },
+    "enforcement": { "type": "string" },
+
+    "bypass_actors": {
+      "type": "array",
+      "items": { "type": "string" }
+    },
+
+    "include_refs": {
+      "type": "array",
+      "items": { "type": "string" }
+    },
+    "exclude_refs": {
+      "type": "array",
+      "items": { "type": "string" }
+    },
+
+    "allows_creations": { "type": "boolean" },
+    "allows_deletions": { "type": "boolean" },
+    "allows_updates": { "type": "boolean" },
+
+    "allows_force_pushes": { "type": "boolean" },
+
+    "requires_commit_signatures": { "type": "boolean" },
+    "requires_linear_history": { "type": "boolean" },
+
+    "requires_pull_request": { "type": "boolean" },
+    "required_approving_review_count": { "$ref": "#/definitions/integer_or_null" },
+    "dismisses_stale_reviews": { "type": "boolean" },
+    "requires_code_owner_review": { "type": "boolean" },
+    "requires_last_push_approval": { "type": "boolean" },
+    "requires_review_thread_resolution": { "type": "boolean" },
+
+    "requires_deployments": { "type":  "boolean" },
+    "required_deployment_environments": {
+      "type": "array",
+      "items": { "type": "string" }
+    },
+
+    "requires_status_checks": { "type": "boolean" },
+    "requires_strict_status_checks": { "type": "boolean" },
+    "required_status_checks": {
+      "type": "array",
+      "items": { "type": "string" }
+    }
+  },
+
+  "required": [ "name", "enforcement" ],
+  "additionalProperties": false,
+
+  "definitions": {
+    "integer_or_null": {
+      "anyOf": [
+          { "type": "integer" },
+          { "type": "null" }
+        ]
+    }
+  }
+}
\ No newline at end of file
diff --git a/otterdog/resources/schemas/repository.json b/otterdog/resources/schemas/repository.json
index 5a43a6573df48e740bb34d7c29c47069bd74beac..f6051e57bee88174db9316123f89472194d90dbe 100644
--- a/otterdog/resources/schemas/repository.json
+++ b/otterdog/resources/schemas/repository.json
@@ -52,6 +52,10 @@
       "type": "array",
       "items": { "$ref": "branch-protection-rule.json" }
     },
+    "rulesets": {
+      "type": "array",
+      "items": { "$ref": "repository-ruleset.json" }
+    },
     "webhooks": {
       "type": "array",
       "items": { "$ref": "webhook.json" }