
Merge remote-tracking branch 'origin/2.0.x' into ecdsa-default-flag

Alex Zorin
2022-09-27 12:38:20 +10:00
134 changed files with 1120 additions and 5789 deletions

View File

@@ -0,0 +1,18 @@
# Pipeline for testing, building, and deploying Certbot 2.0 pre-releases.
trigger: none
pr: none
variables:
# We don't publish our Docker images in this pipeline, but when building them
# for testing, let's use the nightly tag.
dockerTag: nightly
snapBuildTimeout: 5400
stages:
- template: templates/stages/test-and-package-stage.yml
- stage: DeploySnaps
jobs:
- template: templates/jobs/snap-deploy-job.yml
parameters:
snapReleaseChannel: beta
- template: templates/stages/notify-failure-stage.yml

View File

@@ -15,5 +15,5 @@ stages:
- template: templates/stages/changelog-stage.yml
- template: templates/stages/deploy-stage.yml
parameters:
snapReleaseChannel: beta
snapReleaseChannel: candidate
- template: templates/stages/notify-failure-stage.yml

View File

@@ -2,7 +2,7 @@ jobs:
- job: extended_test
variables:
- name: IMAGE_NAME
value: ubuntu-18.04
value: ubuntu-22.04
- name: PYTHON_VERSION
value: 3.10
- group: certbot-common
@@ -47,13 +47,13 @@ jobs:
nginx-compat:
TOXENV: nginx_compat
linux-integration-rfc2136:
IMAGE_NAME: ubuntu-18.04
IMAGE_NAME: ubuntu-22.04
PYTHON_VERSION: 3.8
TOXENV: integration-dns-rfc2136
docker-dev:
TOXENV: docker_dev
le-modification:
IMAGE_NAME: ubuntu-18.04
IMAGE_NAME: ubuntu-22.04
TOXENV: modification
farmtest-apache2:
PYTHON_VERSION: 3.8

View File

@@ -1,7 +1,7 @@
jobs:
- job: docker_build
pool:
vmImage: ubuntu-18.04
vmImage: ubuntu-22.04
strategy:
matrix:
amd64:
@@ -37,7 +37,7 @@ jobs:
- job: docker_run
dependsOn: docker_build
pool:
vmImage: ubuntu-18.04
vmImage: ubuntu-22.04
steps:
- task: DownloadPipelineArtifact@2
inputs:
@@ -116,7 +116,7 @@ jobs:
displayName: Run certbot integration tests
- job: snaps_build
pool:
vmImage: ubuntu-18.04
vmImage: ubuntu-22.04
strategy:
matrix:
amd64:
@@ -164,7 +164,7 @@ jobs:
- job: snap_run
dependsOn: snaps_build
pool:
vmImage: ubuntu-18.04
vmImage: ubuntu-22.04
steps:
- task: UsePythonVersion@0
inputs:
@@ -194,7 +194,7 @@ jobs:
- job: snap_dns_run
dependsOn: snaps_build
pool:
vmImage: ubuntu-18.04
vmImage: ubuntu-22.04
steps:
- script: |
set -e

View File

@@ -0,0 +1,75 @@
# As (somewhat) described at
# https://docs.microsoft.com/en-us/azure/devops/pipelines/process/templates?view=azure-devops#context,
# each template only has access to the parameters passed into it. To help make
# use of this design, we define snapReleaseChannel without a default value
# which requires the user of this template to define it as described at
# https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/parameters-name?view=azure-pipelines#remarks.
# This forces the user of this template to be explicit while still allowing them to
# define their own parameters with defaults that make sense for that context.
parameters:
- name: snapReleaseChannel
type: string
values:
- edge
- beta
- candidate
jobs:
# This job relies on credentials used to publish the Certbot snaps. This
# credential file was created by running:
#
# snapcraft logout
# snapcraft export-login --channels=candidate,beta,edge snapcraft.cfg
# (provide the shared snapcraft credentials when prompted)
#
# Then the file was added as a secure file in Azure pipelines
# with the name snapcraft.cfg by following the instructions at
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops
# including authorizing the file for use in the "nightly" and "release"
# pipelines as described at
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops#q-how-do-i-authorize-a-secure-file-for-use-in-a-specific-pipeline.
#
# This file has a maximum lifetime of one year and the current file will
# expire on 2023-09-06. The file will need to be updated before then to
# prevent automated deploys from breaking.
#
# Revoking these credentials can be done by changing the password of the
# account used to generate the credentials. See
# https://forum.snapcraft.io/t/revoking-exported-credentials/19031 for
# more info.
- job: publish_snap
pool:
vmImage: ubuntu-22.04
variables:
- group: certbot-common
strategy:
matrix:
amd64:
SNAP_ARCH: amd64
arm32v6:
SNAP_ARCH: armhf
arm64v8:
SNAP_ARCH: arm64
steps:
- bash: |
set -e
sudo apt-get update
sudo apt-get install -y --no-install-recommends snapd
sudo snap install --classic snapcraft
displayName: Install dependencies
- task: DownloadPipelineArtifact@2
inputs:
artifact: snaps_$(SNAP_ARCH)
path: $(Build.SourcesDirectory)/snap
displayName: Retrieve Certbot snaps
- task: DownloadSecureFile@1
name: snapcraftCfg
inputs:
secureFile: snapcraft.cfg
- bash: |
set -e
export SNAPCRAFT_STORE_CREDENTIALS=$(cat "$(snapcraftCfg.secureFilePath)")
for SNAP_FILE in snap/*.snap; do
tools/retry.sh eval snapcraft upload --release=${{ parameters.snapReleaseChannel }} "${SNAP_FILE}"
done
displayName: Publish to Snap store

View File

@@ -5,11 +5,11 @@ jobs:
strategy:
matrix:
macos-py37-cover:
IMAGE_NAME: macOS-10.15
IMAGE_NAME: macOS-12
PYTHON_VERSION: 3.7
TOXENV: py37-cover
macos-py310-cover:
IMAGE_NAME: macOS-10.15
IMAGE_NAME: macOS-12
PYTHON_VERSION: 3.10
TOXENV: py310-cover
windows-py37:
@@ -25,42 +25,42 @@ jobs:
PYTHON_VERSION: 3.9
TOXENV: integration-certbot
linux-oldest-tests-1:
IMAGE_NAME: ubuntu-18.04
IMAGE_NAME: ubuntu-22.04
PYTHON_VERSION: 3.7
TOXENV: '{acme,apache,apache-v2,certbot}-oldest'
linux-oldest-tests-2:
IMAGE_NAME: ubuntu-18.04
IMAGE_NAME: ubuntu-22.04
PYTHON_VERSION: 3.7
TOXENV: '{dns,nginx}-oldest'
linux-py37:
IMAGE_NAME: ubuntu-18.04
IMAGE_NAME: ubuntu-22.04
PYTHON_VERSION: 3.7
TOXENV: py37
linux-py310-cover:
IMAGE_NAME: ubuntu-18.04
IMAGE_NAME: ubuntu-22.04
PYTHON_VERSION: 3.10
TOXENV: py310-cover
linux-py310-lint:
IMAGE_NAME: ubuntu-18.04
IMAGE_NAME: ubuntu-22.04
PYTHON_VERSION: 3.10
TOXENV: lint-posix
linux-py310-mypy:
IMAGE_NAME: ubuntu-18.04
IMAGE_NAME: ubuntu-22.04
PYTHON_VERSION: 3.10
TOXENV: mypy-posix
linux-integration:
IMAGE_NAME: ubuntu-18.04
IMAGE_NAME: ubuntu-22.04
PYTHON_VERSION: 3.8
TOXENV: integration
ACME_SERVER: pebble
apache-compat:
IMAGE_NAME: ubuntu-18.04
IMAGE_NAME: ubuntu-22.04
TOXENV: apache_compat
apacheconftest:
IMAGE_NAME: ubuntu-18.04
IMAGE_NAME: ubuntu-22.04
TOXENV: apacheconftest-with-pebble
nginxroundtrip:
IMAGE_NAME: ubuntu-18.04
IMAGE_NAME: ubuntu-22.04
TOXENV: nginxroundtrip
pool:
vmImage: $(IMAGE_NAME)

View File

@@ -1,77 +1,19 @@
parameters:
# We do not define acceptable values for this parameter here as it is passed
# through to ../jobs/snap-deploy-job.yml which does its own sanity checking.
- name: snapReleaseChannel
type: string
default: edge
values:
- edge
- beta
stages:
- stage: Deploy
jobs:
# This job relies on credentials used to publish the Certbot snaps. This
# credential file was created by running:
#
# snapcraft logout
# snapcraft export-login --channels=beta,edge snapcraft.cfg
# (provide the shared snapcraft credentials when prompted)
#
# Then the file was added as a secure file in Azure pipelines
# with the name snapcraft.cfg by following the instructions at
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops
# including authorizing the file for use in the "nightly" and "release"
# pipelines as described at
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/secure-files?view=azure-devops#q-how-do-i-authorize-a-secure-file-for-use-in-a-specific-pipeline.
#
# This file has a maximum lifetime of one year and the current
# file will expire on 2023-06-17 which is also tracked by
# https://github.com/certbot/certbot/issues/7931. The file will
# need to be updated before then to prevent automated deploys
# from breaking.
#
# Revoking these credentials can be done by changing the password of the
# account used to generate the credentials. See
# https://forum.snapcraft.io/t/revoking-exported-credentials/19031 for
# more info.
- job: publish_snap
pool:
vmImage: ubuntu-18.04
variables:
- group: certbot-common
strategy:
matrix:
amd64:
SNAP_ARCH: amd64
arm32v6:
SNAP_ARCH: armhf
arm64v8:
SNAP_ARCH: arm64
steps:
- bash: |
set -e
sudo apt-get update
sudo apt-get install -y --no-install-recommends snapd
sudo snap install --classic snapcraft
displayName: Install dependencies
- task: DownloadPipelineArtifact@2
inputs:
artifact: snaps_$(SNAP_ARCH)
path: $(Build.SourcesDirectory)/snap
displayName: Retrieve Certbot snaps
- task: DownloadSecureFile@1
name: snapcraftCfg
inputs:
secureFile: snapcraft.cfg
- bash: |
set -e
export SNAPCRAFT_STORE_CREDENTIALS=$(cat $(snapcraftCfg.secureFilePath))
for SNAP_FILE in snap/*.snap; do
tools/retry.sh eval snapcraft upload --release=${{ parameters.snapReleaseChannel }} "${SNAP_FILE}"
done
displayName: Publish to Snap store
- template: ../jobs/snap-deploy-job.yml
parameters:
snapReleaseChannel: ${{ parameters.snapReleaseChannel }}
- job: publish_docker
pool:
vmImage: ubuntu-18.04
vmImage: ubuntu-22.04
strategy:
matrix:
amd64:
@@ -96,11 +38,16 @@ stages:
# which was created by following the instructions at
# https://docs.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml#sep-docreg.
# The name given to this service account must match the value
# given to containerRegistry below. "Grant access to all
# pipelines" should also be checked. To revoke these
# credentials, we can change the password on the certbotbot
# Docker Hub account or remove the account from the
# Certbot organization on Docker Hub.
# given to containerRegistry below. The authentication used when
# creating this service account was a personal access token
# rather than a password to bypass 2FA. When Brad set this up,
# Azure Pipelines failed to verify the credentials with an error
# like "access is forbidden with a JWT issued from a personal
# access token", but after saving them without verification, the
# access token worked when the pipeline actually ran. "Grant
# access to all pipelines" should also be checked on the service
# account. The access token can be deleted on Docker Hub if
# these credentials need to be revoked.
containerRegistry: docker-hub
displayName: Login to Docker Hub
- bash: set -e && tools/docker/deploy.sh $(dockerTag) $DOCKER_ARCH

View File

@@ -12,7 +12,7 @@ steps:
set -e
sudo apt-get update
sudo apt-get install -y --no-install-recommends \
python-dev \
python3-dev \
gcc \
libaugeas0 \
libssl-dev \
@@ -36,8 +36,8 @@ steps:
# problems with its lack of real dependency resolution.
- bash: |
set -e
python tools/pipstrap.py
python tools/pip_install.py -I tox virtualenv
python3 tools/pipstrap.py
python3 tools/pip_install.py -I tox virtualenv
displayName: Install runtime dependencies
- task: DownloadSecureFile@1
name: testFarmPem
@@ -49,7 +49,7 @@ steps:
export TARGET_BRANCH="`echo "${BUILD_SOURCEBRANCH}" | sed -E 's!refs/(heads|tags)/!!g'`"
[ -z "${SYSTEM_PULLREQUEST_TARGETBRANCH}" ] || export TARGET_BRANCH="${SYSTEM_PULLREQUEST_TARGETBRANCH}"
env
python -m tox
python3 -m tox
env:
AWS_ACCESS_KEY_ID: $(AWS_ACCESS_KEY_ID)
AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)

View File

@@ -1,5 +1,6 @@
## Pull Request Checklist
- [ ] The Certbot team has recently expressed interest in reviewing a PR for this. If not, this PR may be closed due to our limited resources and the need to prioritize how we spend them.
- [ ] If the change being made is to a [distributed component](https://certbot.eff.org/docs/contributing.html#code-components-and-layout), edit the `master` section of `certbot/CHANGELOG.md` to include a description of the change being made.
- [ ] Add or update any documentation as needed to support the changes in this PR.
- [ ] Include your name in `AUTHORS.md` if you like.

View File

@@ -283,10 +283,6 @@ ignored-modules=pkg_resources,confargparse,argparse
# (useful for classes with attributes dynamically set).
ignored-classes=Field,Header,JWS,closing
# When zope mode is activated, add a predefined set of Zope acquired attributes
# to generated-members.
zope=yes
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E0201 when accessed. Python regular
# expressions are accepted.
@@ -313,9 +309,8 @@ int-import-graph=
[CLASSES]
# List of interface methods to ignore, separated by a comma. This is used for
# instance to not check methods defined in Zope's Interface base class.
ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by,implementedBy,providedBy
# List of interface methods to ignore, separated by a comma.
ignore-iface-methods=
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp

View File

@@ -178,6 +178,7 @@ Authors
* [Mathieu Leduc-Hamel](https://github.com/mlhamel)
* [Matt Bostock](https://github.com/mattbostock)
* [Matthew Ames](https://github.com/SuperMatt)
* [Matthew W. Thomas](https://github.com/mwt)
* [Michael Schumacher](https://github.com/schumaml)
* [Michael Strache](https://github.com/Jarodiv)
* [Michael Sverdlin](https://github.com/sveder)
@@ -215,6 +216,7 @@ Authors
* [Pierre Jaury](https://github.com/kaiyou)
* [Piotr Kasprzyk](https://github.com/kwadrat)
* [Prayag Verma](https://github.com/pra85)
* [Preston Locke](https://github.com/Preston12321)
* [Rasesh Patel](https://github.com/raspat1)
* [Reinaldo de Souza Jr](https://github.com/juniorz)
* [Remi Rampin](https://github.com/remram44)

View File

@@ -23,9 +23,6 @@ import requests
from acme import crypto_util
from acme import errors
from acme import fields
from acme.mixins import ResourceMixin
from acme.mixins import TypeMixin
logger = logging.getLogger(__name__)
@@ -47,12 +44,17 @@ class Challenge(jose.TypedJSONObjectWithFields):
return UnrecognizedChallenge.from_json(jobj)
class ChallengeResponse(ResourceMixin, TypeMixin, jose.TypedJSONObjectWithFields):
class ChallengeResponse(jose.TypedJSONObjectWithFields):
# _fields_to_partial_json
"""ACME challenge response."""
TYPES: Dict[str, Type['ChallengeResponse']] = {}
resource_type = 'challenge'
resource: str = fields.resource(resource_type)
def to_partial_json(self) -> Dict[str, Any]:
# Removes the `type` field which is inserted by TypedJSONObjectWithFields.to_partial_json.
# This field breaks RFC8555 compliance.
jobj = super().to_partial_json()
jobj.pop(self.type_field_name, None)
return jobj
class UnrecognizedChallenge(Challenge):
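For illustration, a minimal sketch (not from the changeset) of what the new serialization yields: with the type field stripped and the key authorization never emitted, a challenge response serializes to the empty JSON object that RFC 8555 section 7.5.1 expects.

from acme import challenges

# Build a bare HTTP-01 response, as the RFC 8555 compliance test below does.
response = challenges.HTTP01Response()

# to_partial_json() drops the "type" field inserted by
# TypedJSONObjectWithFields, so the JWS payload body is just {}.
assert response.to_partial_json() == {}
print(response.json_dumps())  # -> {}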
@@ -408,7 +410,7 @@ class TLSALPN01Response(KeyAuthorizationChallengeResponse):
"""
ID_PE_ACME_IDENTIFIER_V1 = b"1.3.6.1.5.5.7.1.30.1"
ACME_TLS_1_PROTOCOL = "acme-tls/1"
ACME_TLS_1_PROTOCOL = b"acme-tls/1"
@property
def h(self) -> bytes:

File diff suppressed because it is too large.

View File

@@ -11,6 +11,7 @@ from typing import Callable
from typing import List
from typing import Mapping
from typing import Optional
from typing import Sequence
from typing import Set
from typing import Tuple
from typing import Union
@@ -39,7 +40,9 @@ class _DefaultCertSelection:
def __call__(self, connection: SSL.Connection) -> Optional[Tuple[crypto.PKey, crypto.X509]]:
server_name = connection.get_servername()
return self.certs.get(server_name, None)
if server_name:
return self.certs.get(server_name, None)
return None # pragma: no cover
class SSLSocket: # pylint: disable=too-few-public-methods
@@ -60,7 +63,8 @@ class SSLSocket: # pylint: disable=too-few-public-methods
method: int = _DEFAULT_SSL_METHOD,
alpn_selection: Optional[Callable[[SSL.Connection, List[bytes]], bytes]] = None,
cert_selection: Optional[Callable[[SSL.Connection],
Tuple[crypto.PKey, crypto.X509]]] = None
Optional[Tuple[crypto.PKey,
crypto.X509]]]] = None
) -> None:
self.sock = sock
self.alpn_selection = alpn_selection
@@ -71,8 +75,8 @@ class SSLSocket: # pylint: disable=too-few-public-methods
raise ValueError("Both cert_selection and certs specified.")
actual_cert_selection: Union[_DefaultCertSelection,
Optional[Callable[[SSL.Connection],
Tuple[crypto.PKey,
crypto.X509]]]] = cert_selection
Optional[Tuple[crypto.PKey,
crypto.X509]]]]] = cert_selection
if actual_cert_selection is None:
actual_cert_selection = _DefaultCertSelection(certs if certs else {})
self.cert_selection = actual_cert_selection
@@ -157,7 +161,7 @@ class SSLSocket: # pylint: disable=too-few-public-methods
def probe_sni(name: bytes, host: bytes, port: int = 443, timeout: int = 300, # pylint: disable=too-many-arguments
method: int = _DEFAULT_SSL_METHOD, source_address: Tuple[str, int] = ('', 0),
alpn_protocols: Optional[List[str]] = None) -> crypto.X509:
alpn_protocols: Optional[Sequence[bytes]] = None) -> crypto.X509:
"""Probe SNI server for SSL certificate.
:param bytes name: Byte string to send as the server name in the
@@ -170,7 +174,7 @@ def probe_sni(name: bytes, host: bytes, port: int = 443, timeout: int = 300, #
of source interface). See `socket.creation_connection` for more
info. Available only in Python 2.7+.
:param alpn_protocols: Protocols to request using ALPN.
:type alpn_protocols: `list` of `str`
:type alpn_protocols: `Sequence` of `bytes`
:raises acme.errors.Error: In case of any problems.
@@ -207,7 +211,9 @@ def probe_sni(name: bytes, host: bytes, port: int = 443, timeout: int = 300, #
client_ssl.shutdown()
except SSL.Error as error:
raise errors.Error(error)
return client_ssl.get_peer_certificate()
cert = client_ssl.get_peer_certificate()
assert cert # Appease mypy. We would have crashed out by now if there was no certificate.
return cert
def make_csr(private_key_pem: bytes, domains: Optional[Union[Set[str], List[str]]] = None,
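For illustration, a short sketch (hypothetical host and port, not from the changeset) of calling probe_sni with the now bytes-typed ALPN protocol list, matching the bytes ACME_TLS_1_PROTOCOL constant:

from acme import crypto_util
from acme import errors

try:
    cert = crypto_util.probe_sni(
        name=b'example.com',             # SNI name to send, as bytes
        host=b'127.0.0.1', port=5001,    # hypothetical TLS-ALPN-01 responder
        alpn_protocols=[b'acme-tls/1'],  # Sequence of bytes after this change
    )
    print(cert.get_subject())
except errors.Error as err:
    print('probe failed:', err)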

View File

@@ -51,22 +51,6 @@ class RFC3339Field(jose.Field):
raise jose.DeserializationError(error)
class Resource(jose.Field):
"""Resource MITM field."""
def __init__(self, resource_type: str, *args: Any, **kwargs: Any) -> None:
self.resource_type = resource_type
kwargs['default'] = resource_type
super().__init__('resource', *args, **kwargs)
def decode(self, value: Any) -> Any:
if value != self.resource_type:
raise jose.DeserializationError(
'Wrong resource type: {0} instead of {1}'.format(
value, self.resource_type))
return value
def fixed(json_name: str, value: Any) -> Any:
"""Generates a type-friendly Fixed field."""
return Fixed(json_name, value)
@@ -75,8 +59,3 @@ def fixed(json_name: str, value: Any) -> Any:
def rfc3339(json_name: str, omitempty: bool = False) -> Any:
"""Generates a type-friendly RFC3339 field."""
return RFC3339Field(json_name, omitempty=omitempty)
def resource(resource_type: str) -> Any:
"""Generates a type-friendly Resource field."""
return Resource(resource_type)
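With the Resource field and the resource() helper gone, acme.fields keeps only the fixed and rfc3339 helpers; for illustration, a minimal sketch (hypothetical resource class, not from the changeset) of how they are typically declared:

import josepy as jose

from acme import fields

class ExampleResource(jose.JSONObjectWithFields):  # hypothetical, for illustration only
    # Serialized as a constant value; decoding a different value is rejected.
    status = fields.fixed('status', 'pending')
    # Encoded/decoded as an RFC 3339 timestamp; omitted when unset.
    expires = fields.rfc3339('expires', omitempty=True)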

View File

@@ -1,18 +0,0 @@
"""Simple shim around the typing module.
This was useful when this code supported Python 2 and typing wasn't always
available. This code is being kept for now for backwards compatibility.
"""
import warnings
from typing import * # pylint: disable=wildcard-import, unused-wildcard-import
from typing import Any
warnings.warn("acme.magic_typing is deprecated and will be removed in a future release.",
DeprecationWarning)
class TypingClass:
"""Ignore import errors by getting anything"""
def __getattr__(self, name: str) -> Any:
return None # pragma: no cover

View File

@@ -11,9 +11,7 @@ from typing import MutableMapping
from typing import Optional
from typing import Tuple
from typing import Type
from typing import TYPE_CHECKING
from typing import TypeVar
from typing import Union
import josepy as jose
@@ -22,14 +20,8 @@ from acme import errors
from acme import fields
from acme import jws
from acme import util
from acme.mixins import ResourceMixin
if TYPE_CHECKING:
from typing_extensions import Protocol # pragma: no cover
else:
Protocol = object
OLD_ERROR_PREFIX = "urn:acme:error:"
ERROR_PREFIX = "urn:ietf:params:acme:error:"
ERROR_CODES = {
@@ -67,15 +59,13 @@ ERROR_CODES = {
ERROR_TYPE_DESCRIPTIONS = {**{
ERROR_PREFIX + name: desc for name, desc in ERROR_CODES.items()
}, **{ # add errors with old prefix, deprecate me
OLD_ERROR_PREFIX + name: desc for name, desc in ERROR_CODES.items()
}}
def is_acme_error(err: BaseException) -> bool:
"""Check if argument is an ACME error."""
if isinstance(err, Error) and (err.typ is not None):
return (ERROR_PREFIX in err.typ) or (OLD_ERROR_PREFIX in err.typ)
return ERROR_PREFIX in err.typ
return False
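For illustration, a small sketch (hypothetical error values, not from the changeset) of how error matching behaves once only the RFC 8555 prefix is recognized:

from acme import messages

err = messages.Error(typ=messages.ERROR_PREFIX + 'unauthorized',
                     detail='No authorization for this identifier')
assert messages.is_acme_error(err)         # urn:ietf:params:acme:error:* still matches

legacy = messages.Error(typ='urn:acme:error:unauthorized', detail='legacy prefix')
assert not messages.is_acme_error(legacy)  # the pre-RFC prefix no longer matches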
@@ -223,25 +213,15 @@ STATUS_READY = Status('ready')
STATUS_DEACTIVATED = Status('deactivated')
class HasResourceType(Protocol):
"""
Represents a class with a resource_type class parameter of type string.
"""
resource_type: str = NotImplemented
GenericHasResourceType = TypeVar("GenericHasResourceType", bound=HasResourceType)
class Directory(jose.JSONDeSerializable):
"""Directory."""
"""Directory.
_REGISTERED_TYPES: Dict[str, Type[HasResourceType]] = {}
Directory resources must be accessed by the exact field name in RFC8555 (section 9.7.5).
"""
class Meta(jose.JSONObjectWithFields):
"""Directory Meta."""
_terms_of_service: str = jose.field('terms-of-service', omitempty=True)
_terms_of_service_v2: str = jose.field('termsOfService', omitempty=True)
_terms_of_service: str = jose.field('termsOfService', omitempty=True)
website: str = jose.field('website', omitempty=True)
caa_identities: List[str] = jose.field('caaIdentities', omitempty=True)
external_account_required: bool = jose.field('externalAccountRequired', omitempty=True)
@@ -253,7 +233,7 @@ class Directory(jose.JSONDeSerializable):
@property
def terms_of_service(self) -> str:
"""URL for the CA TOS"""
return self._terms_of_service or self._terms_of_service_v2
return self._terms_of_service
def __iter__(self) -> Iterator[str]:
# When iterating over fields, use the external name 'terms_of_service' instead of
@@ -264,41 +244,23 @@ class Directory(jose.JSONDeSerializable):
def _internal_name(self, name: str) -> str:
return '_' + name if name == 'terms_of_service' else name
@classmethod
def _canon_key(cls, key: Union[str, HasResourceType, Type[HasResourceType]]) -> str:
if isinstance(key, str):
return key
return key.resource_type
@classmethod
def register(cls,
resource_body_cls: Type[GenericHasResourceType]) -> Type[GenericHasResourceType]:
"""Register resource."""
resource_type = resource_body_cls.resource_type
assert resource_type not in cls._REGISTERED_TYPES
cls._REGISTERED_TYPES[resource_type] = resource_body_cls
return resource_body_cls
def __init__(self, jobj: Mapping[str, Any]) -> None:
canon_jobj = util.map_keys(jobj, self._canon_key)
# TODO: check that everything is an absolute URL; acme-spec is
# not clear on that
self._jobj = canon_jobj
self._jobj = jobj
def __getattr__(self, name: str) -> Any:
try:
return self[name.replace('_', '-')]
return self[name]
except KeyError as error:
raise AttributeError(str(error))
def __getitem__(self, name: Union[str, HasResourceType, Type[HasResourceType]]) -> Any:
def __getitem__(self, name: str) -> Any:
try:
return self._jobj[self._canon_key(name)]
return self._jobj[name]
except KeyError:
raise KeyError('Directory field "' + self._canon_key(name) + '" not found')
raise KeyError(f'Directory field "{name}" not found')
def to_partial_json(self) -> Dict[str, Any]:
return self._jobj
return util.map_keys(self._jobj, lambda k: k)
@classmethod
def from_json(cls, jobj: MutableMapping[str, Any]) -> 'Directory':
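For illustration, a minimal sketch (hypothetical endpoint URLs, not from the changeset) of how a directory is addressed now that only the exact RFC 8555 field names are recognized:

from acme import messages

directory = messages.Directory({
    'newAccount': 'https://example.com/acme/new-account',
    'newNonce': 'https://example.com/acme/new-nonce',
    'newOrder': 'https://example.com/acme/new-order',
    'revokeCert': 'https://example.com/acme/revoke-cert',
})

print(directory['newOrder'])  # item access uses the exact RFC 8555 field name
print(directory.newOrder)     # attribute access forwards to the same lookup
# Pre-RFC aliases such as directory['new-order'] and resource classes such as
# messages.NewOrder are no longer accepted as keys and raise KeyError.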
@@ -459,17 +421,12 @@ class Registration(ResourceBody):
return self._filter_contact(self.email_prefix)
@Directory.register
class NewRegistration(ResourceMixin, Registration):
class NewRegistration(Registration):
"""New registration."""
resource_type = 'new-reg'
resource: str = fields.resource(resource_type)
class UpdateRegistration(ResourceMixin, Registration):
class UpdateRegistration(Registration):
"""Update registration."""
resource_type = 'reg'
resource: str = fields.resource(resource_type)
class RegistrationResource(ResourceWithURI):
@@ -507,7 +464,6 @@ class ChallengeBody(ResourceBody):
# challenge object supports either one, but should be accessed through the
# name "uri". In Client.answer_challenge, whichever one is set will be
# used.
_uri: str = jose.field('uri', omitempty=True, default=None)
_url: str = jose.field('url', omitempty=True, default=None)
status: Status = jose.field('status', decoder=Status.from_json,
omitempty=True, default=STATUS_PENDING)
@@ -536,7 +492,7 @@ class ChallengeBody(ResourceBody):
@property
def uri(self) -> str:
"""The URL of this challenge."""
return self._url or self._uri
return self._url
def __getattr__(self, name: str) -> Any:
return getattr(self.chall, name)
@@ -545,10 +501,10 @@ class ChallengeBody(ResourceBody):
# When iterating over fields, use the external name 'uri' instead of
# the internal '_uri'.
for name in super().__iter__():
yield name[1:] if name == '_uri' else name
yield 'uri' if name == '_url' else name
def _internal_name(self, name: str) -> str:
return '_' + name if name == 'uri' else name
return '_url' if name == 'uri' else name
class ChallengeResource(Resource):
@@ -572,15 +528,12 @@ class Authorization(ResourceBody):
:ivar acme.messages.Identifier identifier:
:ivar list challenges: `list` of `.ChallengeBody`
:ivar tuple combinations: Challenge combinations (`tuple` of `tuple`
of `int`, as opposed to `list` of `list` from the spec).
:ivar acme.messages.Status status:
:ivar datetime.datetime expires:
"""
identifier: Identifier = jose.field('identifier', decoder=Identifier.from_json, omitempty=True)
challenges: List[ChallengeBody] = jose.field('challenges', omitempty=True)
combinations: Tuple[Tuple[int, ...], ...] = jose.field('combinations', omitempty=True)
status: Status = jose.field('status', omitempty=True, decoder=Status.from_json)
# TODO: 'expires' is allowed for Authorization Resources in
@@ -596,24 +549,13 @@ class Authorization(ResourceBody):
def challenges(value: List[Dict[str, Any]]) -> Tuple[ChallengeBody, ...]: # type: ignore[misc] # pylint: disable=no-self-argument,missing-function-docstring
return tuple(ChallengeBody.from_json(chall) for chall in value)
@property
def resolved_combinations(self) -> Tuple[Tuple[ChallengeBody, ...], ...]:
"""Combinations with challenges instead of indices."""
return tuple(tuple(self.challenges[idx] for idx in combo)
for combo in self.combinations) # pylint: disable=not-an-iterable
@Directory.register
class NewAuthorization(ResourceMixin, Authorization):
class NewAuthorization(Authorization):
"""New authorization."""
resource_type = 'new-authz'
resource: str = fields.resource(resource_type)
class UpdateAuthorization(ResourceMixin, Authorization):
class UpdateAuthorization(Authorization):
"""Update authorization."""
resource_type = 'authz'
resource: str = fields.resource(resource_type)
class AuthorizationResource(ResourceWithURI):
@@ -627,16 +569,13 @@ class AuthorizationResource(ResourceWithURI):
new_cert_uri: str = jose.field('new_cert_uri', omitempty=True)
@Directory.register
class CertificateRequest(ResourceMixin, jose.JSONObjectWithFields):
"""ACME new-cert request.
class CertificateRequest(jose.JSONObjectWithFields):
"""ACME newOrder request.
:ivar jose.ComparableX509 csr:
`OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`
"""
resource_type = 'new-cert'
resource: str = fields.resource(resource_type)
csr: jose.ComparableX509 = jose.field('csr', decoder=jose.decode_csr, encoder=jose.encode_csr)
@@ -653,16 +592,13 @@ class CertificateResource(ResourceWithURI):
authzrs: Tuple[AuthorizationResource, ...] = jose.field('authzrs')
@Directory.register
class Revocation(ResourceMixin, jose.JSONObjectWithFields):
class Revocation(jose.JSONObjectWithFields):
"""Revocation message.
:ivar jose.ComparableX509 certificate: `OpenSSL.crypto.X509` wrapped in
`jose.ComparableX509`
"""
resource_type = 'revoke-cert'
resource: str = fields.resource(resource_type)
certificate: jose.ComparableX509 = jose.field(
'certificate', decoder=jose.decode_cert, encoder=jose.encode_cert)
reason: int = jose.field('reason')
@@ -719,7 +655,5 @@ class OrderResource(ResourceWithURI):
omitempty=True)
@Directory.register
class NewOrder(Order):
"""New order."""
resource_type = 'new-order'

View File

@@ -1,68 +0,0 @@
"""Useful mixins for Challenge and Resource objects"""
from typing import Any
from typing import Dict
class VersionedLEACMEMixin:
"""This mixin stores the version of Let's Encrypt's endpoint being used."""
@property
def le_acme_version(self) -> int:
"""Define the version of ACME protocol to use"""
return getattr(self, '_le_acme_version', 1)
@le_acme_version.setter
def le_acme_version(self, version: int) -> None:
# We need to use object.__setattr__ to not depend on the specific implementation of
# __setattr__ in current class (eg. jose.TypedJSONObjectWithFields raises AttributeError
# for any attempt to set an attribute to make objects immutable).
object.__setattr__(self, '_le_acme_version', version)
def __setattr__(self, key: str, value: Any) -> None:
if key == 'le_acme_version':
# Required for @property to operate properly. See comment above.
object.__setattr__(self, key, value)
else:
super().__setattr__(key, value) # pragma: no cover
class ResourceMixin(VersionedLEACMEMixin):
"""
This mixin generates a RFC8555 compliant JWS payload
by removing the `resource` field if needed (eg. ACME v2 protocol).
"""
def to_partial_json(self) -> Dict[str, Any]:
"""See josepy.JSONDeserializable.to_partial_json()"""
return _safe_jobj_compliance(super(),
'to_partial_json', 'resource')
def fields_to_partial_json(self) -> Dict[str, Any]:
"""See josepy.JSONObjectWithFields.fields_to_partial_json()"""
return _safe_jobj_compliance(super(),
'fields_to_partial_json', 'resource')
class TypeMixin(VersionedLEACMEMixin):
"""
This mixin allows generation of a RFC8555 compliant JWS payload
by removing the `type` field if needed (eg. ACME v2 protocol).
"""
def to_partial_json(self) -> Dict[str, Any]:
"""See josepy.JSONDeserializable.to_partial_json()"""
return _safe_jobj_compliance(super(),
'to_partial_json', 'type')
def fields_to_partial_json(self) -> Dict[str, Any]:
"""See josepy.JSONObjectWithFields.fields_to_partial_json()"""
return _safe_jobj_compliance(super(),
'fields_to_partial_json', 'type')
def _safe_jobj_compliance(instance: Any, jobj_method: str,
uncompliant_field: str) -> Dict[str, Any]:
if hasattr(instance, jobj_method):
jobj: Dict[str, Any] = getattr(instance, jobj_method)()
if instance.le_acme_version == 2:
jobj.pop(uncompliant_field, None)
return jobj
raise AttributeError(f'Method {jobj_method}() is not implemented.') # pragma: no cover

View File

@@ -46,10 +46,12 @@ class TLSServer(socketserver.TCPServer):
method=self.method))
def _cert_selection(self, connection: SSL.Connection
) -> Tuple[crypto.PKey, crypto.X509]: # pragma: no cover
) -> Optional[Tuple[crypto.PKey, crypto.X509]]: # pragma: no cover
"""Callback selecting certificate for connection."""
server_name = connection.get_servername()
return self.certs.get(server_name, None)
if server_name:
return self.certs.get(server_name, None)
return None
def server_bind(self) -> None:
self._wrap_sock()
@@ -151,7 +153,7 @@ class TLSALPN01Server(TLSServer, ACMEServerMixin):
def __init__(self, server_address: Tuple[str, int],
certs: List[Tuple[crypto.PKey, crypto.X509]],
challenge_certs: Mapping[str, Tuple[crypto.PKey, crypto.X509]],
challenge_certs: Mapping[bytes, Tuple[crypto.PKey, crypto.X509]],
ipv6: bool = False) -> None:
# We don't need to implement a request handler here because the work
# (including logging) is being done by wrapped socket set up in the
@@ -161,7 +163,8 @@ class TLSALPN01Server(TLSServer, ACMEServerMixin):
ipv6=ipv6)
self.challenge_certs = challenge_certs
def _cert_selection(self, connection: SSL.Connection) -> Tuple[crypto.PKey, crypto.X509]:
def _cert_selection(self, connection: SSL.Connection) -> Optional[Tuple[crypto.PKey,
crypto.X509]]:
# TODO: We would like to serve challenge cert only if asked for it via
# ALPN. To do this, we need to retrieve the list of protos from client
# hello, but this is currently impossible with openssl [0], and ALPN
@@ -170,8 +173,10 @@ class TLSALPN01Server(TLSServer, ACMEServerMixin):
# handshake in alpn_selection() if ALPN protos are not what we expect.
# [0] https://github.com/openssl/openssl/issues/4952
server_name = connection.get_servername()
logger.debug("Serving challenge cert for server name %s", server_name)
return self.challenge_certs[server_name]
if server_name:
logger.debug("Serving challenge cert for server name %s", server_name)
return self.challenge_certs[server_name]
return None # pragma: no cover
def _alpn_selection(self, _connection: SSL.Connection, alpn_protos: List[bytes]) -> bytes:
"""Callback to select alpn protocol."""

View File

@@ -163,7 +163,7 @@ def example_http():
# Register account and accept TOS
net = client.ClientNetwork(acc_key, user_agent=USER_AGENT)
directory = messages.Directory.from_json(net.get(DIRECTORY_URL).json())
directory = client.ClientV2.get_directory(DIRECTORY_URL, net)
client_acme = client.ClientV2(directory, net=net)
# Terms of Service URL is in client_acme.directory.meta.terms_of_service
@@ -215,8 +215,7 @@ def example_http():
try:
regr = client_acme.query_registration(regr)
except errors.Error as err:
if err.typ == messages.OLD_ERROR_PREFIX + 'unauthorized' \
or err.typ == messages.ERROR_PREFIX + 'unauthorized':
if err.typ == messages.ERROR_PREFIX + 'unauthorized':
# Status is deactivated.
pass
raise

View File

@@ -3,7 +3,7 @@ import sys
from setuptools import find_packages
from setuptools import setup
version = '1.30.0.dev0'
version = '1.31.0.dev0'
install_requires = [
'cryptography>=2.5.0',
@@ -12,7 +12,6 @@ install_requires = [
'pyrfc3339',
'pytz>=2019.3',
'requests>=2.20.0',
'requests-toolbelt>=0.3.0',
'setuptools>=41.6.0',
]

View File

@@ -92,8 +92,7 @@ class DNS01ResponseTest(unittest.TestCase):
self.response = self.chall.response(KEY)
def test_to_partial_json(self):
self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
self.msg.to_partial_json())
self.assertEqual({}, self.msg.to_partial_json())
def test_from_json(self):
from acme.challenges import DNS01Response
@@ -163,8 +162,7 @@ class HTTP01ResponseTest(unittest.TestCase):
self.response = self.chall.response(KEY)
def test_to_partial_json(self):
self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
self.msg.to_partial_json())
self.assertEqual({}, self.msg.to_partial_json())
def test_from_json(self):
from acme.challenges import HTTP01Response
@@ -274,8 +272,7 @@ class TLSALPN01ResponseTest(unittest.TestCase):
}
def test_to_partial_json(self):
self.assertEqual({k: v for k, v in self.jmsg.items() if k != 'keyAuthorization'},
self.response.to_partial_json())
self.assertEqual({}, self.response.to_partial_json())
def test_from_json(self):
from acme.challenges import TLSALPN01Response
@@ -328,12 +325,12 @@ class TLSALPN01ResponseTest(unittest.TestCase):
mock_gethostbyname.assert_called_once_with('foo.com')
mock_probe_sni.assert_called_once_with(
host=b'127.0.0.1', port=self.response.PORT, name=b'foo.com',
alpn_protocols=['acme-tls/1'])
alpn_protocols=[b'acme-tls/1'])
self.response.probe_cert('foo.com', host='8.8.8.8')
mock_probe_sni.assert_called_with(
host=b'8.8.8.8', port=mock.ANY, name=b'foo.com',
alpn_protocols=['acme-tls/1'])
alpn_protocols=[b'acme-tls/1'])
@mock.patch('acme.challenges.TLSALPN01Response.probe_cert')
def test_simple_verify_false_on_probe_error(self, mock_probe_cert):
@@ -461,8 +458,6 @@ class DNSResponseTest(unittest.TestCase):
from acme.challenges import DNSResponse
self.msg = DNSResponse(validation=self.validation)
self.jmsg_to = {
'resource': 'challenge',
'type': 'dns',
'validation': self.validation,
}
self.jmsg_from = {
@@ -492,7 +487,6 @@ class JWSPayloadRFC8555Compliant(unittest.TestCase):
from acme.challenges import HTTP01Response
challenge_body = HTTP01Response()
challenge_body.le_acme_version = 2
jobj = challenge_body.json_dumps(indent=2).encode()
# RFC8555 states that challenge responses must have an empty payload.

View File

@@ -3,52 +3,38 @@
import copy
import datetime
import http.client as http_client
import ipaddress
import json
import unittest
from typing import Dict
from unittest import mock
import josepy as jose
import OpenSSL
import requests
from acme import challenges
from acme import errors
from acme import jws as acme_jws
from acme import messages
from acme.mixins import VersionedLEACMEMixin
from acme.client import ClientNetwork
from acme.client import ClientV2
import messages_test
import test_util
CERT_DER = test_util.load_vector('cert.der')
CERT_SAN_PEM = test_util.load_vector('cert-san.pem')
CSR_SAN_PEM = test_util.load_vector('csr-san.pem')
CSR_MIXED_PEM = test_util.load_vector('csr-mixed.pem')
KEY = jose.JWKRSA.load(test_util.load_vector('rsa512_key.pem'))
KEY2 = jose.JWKRSA.load(test_util.load_vector('rsa256_key.pem'))
DIRECTORY_V1 = messages.Directory({
messages.NewRegistration:
'https://www.letsencrypt-demo.org/acme/new-reg',
messages.Revocation:
'https://www.letsencrypt-demo.org/acme/revoke-cert',
messages.NewAuthorization:
'https://www.letsencrypt-demo.org/acme/new-authz',
messages.CertificateRequest:
'https://www.letsencrypt-demo.org/acme/new-cert',
})
DIRECTORY_V2 = messages.Directory({
'newAccount': 'https://www.letsencrypt-demo.org/acme/new-account',
'newNonce': 'https://www.letsencrypt-demo.org/acme/new-nonce',
'newOrder': 'https://www.letsencrypt-demo.org/acme/new-order',
'revokeCert': 'https://www.letsencrypt-demo.org/acme/revoke-cert',
'meta': messages.Directory.Meta(),
})
class ClientTestBase(unittest.TestCase):
"""Base for tests in acme.client."""
class ClientV2Test(unittest.TestCase):
"""Tests for acme.client.ClientV2."""
def setUp(self):
self.response = mock.MagicMock(
@@ -80,650 +66,15 @@ class ClientTestBase(unittest.TestCase):
self.authz = messages.Authorization(
identifier=messages.Identifier(
typ=messages.IDENTIFIER_FQDN, value='example.com'),
challenges=(challb,), combinations=None)
challenges=(challb,))
self.authzr = messages.AuthorizationResource(
body=self.authz, uri=authzr_uri)
# Reason code for revocation
self.rsn = 1
class BackwardsCompatibleClientV2Test(ClientTestBase):
"""Tests for acme.client.BackwardsCompatibleClientV2."""
def setUp(self):
super().setUp()
# contains a loaded cert
self.certr = messages.CertificateResource(
body=messages_test.CERT)
loaded = OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM, CERT_SAN_PEM)
wrapped = jose.ComparableX509(loaded)
self.chain = [wrapped, wrapped]
self.cert_pem = OpenSSL.crypto.dump_certificate(
OpenSSL.crypto.FILETYPE_PEM, messages_test.CERT.wrapped).decode()
single_chain = OpenSSL.crypto.dump_certificate(
OpenSSL.crypto.FILETYPE_PEM, loaded).decode()
self.chain_pem = single_chain + single_chain
self.fullchain_pem = self.cert_pem + self.chain_pem
self.orderr = messages.OrderResource(
csr_pem=CSR_SAN_PEM)
def _init(self):
uri = 'http://www.letsencrypt-demo.org/directory'
from acme.client import BackwardsCompatibleClientV2
return BackwardsCompatibleClientV2(net=self.net,
key=KEY, server=uri)
def test_init_downloads_directory(self):
uri = 'http://www.letsencrypt-demo.org/directory'
from acme.client import BackwardsCompatibleClientV2
BackwardsCompatibleClientV2(net=self.net,
key=KEY, server=uri)
self.net.get.assert_called_once_with(uri)
def test_init_acme_version(self):
self.response.json.return_value = DIRECTORY_V1.to_json()
client = self._init()
self.assertEqual(client.acme_version, 1)
self.response.json.return_value = DIRECTORY_V2.to_json()
client = self._init()
self.assertEqual(client.acme_version, 2)
def test_query_registration_client_v2(self):
self.response.json.return_value = DIRECTORY_V2.to_json()
client = self._init()
self.response.json.return_value = self.regr.body.to_json()
self.response.headers = {'Location': 'https://www.letsencrypt-demo.org/acme/reg/1'}
self.assertEqual(self.regr, client.query_registration(self.regr))
def test_forwarding(self):
self.response.json.return_value = DIRECTORY_V1.to_json()
client = self._init()
self.assertEqual(client.directory, client.client.directory)
self.assertEqual(client.key, KEY)
self.assertEqual(client.deactivate_registration, client.client.deactivate_registration)
self.assertRaises(AttributeError, client.__getattr__, 'nonexistent')
self.assertRaises(AttributeError, client.__getattr__, 'new_account_and_tos')
self.assertRaises(AttributeError, client.__getattr__, 'new_account')
def test_new_account_and_tos(self):
# v2 no tos
self.response.json.return_value = DIRECTORY_V2.to_json()
with mock.patch('acme.client.ClientV2') as mock_client:
client = self._init()
client.new_account_and_tos(self.new_reg)
mock_client().new_account.assert_called_with(self.new_reg)
# v2 tos good
with mock.patch('acme.client.ClientV2') as mock_client:
mock_client().directory.meta.__contains__.return_value = True
client = self._init()
client.new_account_and_tos(self.new_reg, lambda x: True)
mock_client().new_account.assert_called_with(
self.new_reg.update(terms_of_service_agreed=True))
# v2 tos bad
with mock.patch('acme.client.ClientV2') as mock_client:
mock_client().directory.meta.__contains__.return_value = True
client = self._init()
def _tos_cb(tos):
raise errors.Error
self.assertRaises(errors.Error, client.new_account_and_tos,
self.new_reg, _tos_cb)
mock_client().new_account.assert_not_called()
# v1 yes tos
self.response.json.return_value = DIRECTORY_V1.to_json()
with mock.patch('acme.client.Client') as mock_client:
regr = mock.MagicMock(terms_of_service="TOS")
mock_client().register.return_value = regr
client = self._init()
client.new_account_and_tos(self.new_reg)
mock_client().register.assert_called_once_with(self.new_reg)
mock_client().agree_to_tos.assert_called_once_with(regr)
# v1 no tos
with mock.patch('acme.client.Client') as mock_client:
regr = mock.MagicMock(terms_of_service=None)
mock_client().register.return_value = regr
client = self._init()
client.new_account_and_tos(self.new_reg)
mock_client().register.assert_called_once_with(self.new_reg)
mock_client().agree_to_tos.assert_not_called()
@mock.patch('OpenSSL.crypto.load_certificate_request')
@mock.patch('acme.crypto_util._pyopenssl_cert_or_req_all_names')
def test_new_order_v1(self, mock__pyopenssl_cert_or_req_all_names,
unused_mock_load_certificate_request):
self.response.json.return_value = DIRECTORY_V1.to_json()
mock__pyopenssl_cert_or_req_all_names.return_value = ['example.com', 'www.example.com']
mock_csr_pem = mock.MagicMock()
with mock.patch('acme.client.Client') as mock_client:
mock_client().request_domain_challenges.return_value = mock.sentinel.auth
client = self._init()
orderr = client.new_order(mock_csr_pem)
self.assertEqual(orderr.authorizations, [mock.sentinel.auth, mock.sentinel.auth])
def test_new_order_v2(self):
self.response.json.return_value = DIRECTORY_V2.to_json()
mock_csr_pem = mock.MagicMock()
with mock.patch('acme.client.ClientV2') as mock_client:
client = self._init()
client.new_order(mock_csr_pem)
mock_client().new_order.assert_called_once_with(mock_csr_pem)
@mock.patch('acme.client.Client')
def test_finalize_order_v1_success(self, mock_client):
self.response.json.return_value = DIRECTORY_V1.to_json()
mock_client().request_issuance.return_value = self.certr
mock_client().fetch_chain.return_value = self.chain
deadline = datetime.datetime(9999, 9, 9)
client = self._init()
result = client.finalize_order(self.orderr, deadline)
self.assertEqual(result.fullchain_pem, self.fullchain_pem)
mock_client().fetch_chain.assert_called_once_with(self.certr)
@mock.patch('acme.client.Client')
def test_finalize_order_v1_fetch_chain_error(self, mock_client):
self.response.json.return_value = DIRECTORY_V1.to_json()
mock_client().request_issuance.return_value = self.certr
mock_client().fetch_chain.return_value = self.chain
mock_client().fetch_chain.side_effect = [errors.Error, self.chain]
deadline = datetime.datetime(9999, 9, 9)
client = self._init()
result = client.finalize_order(self.orderr, deadline)
self.assertEqual(result.fullchain_pem, self.fullchain_pem)
self.assertEqual(mock_client().fetch_chain.call_count, 2)
@mock.patch('acme.client.Client')
def test_finalize_order_v1_timeout(self, mock_client):
self.response.json.return_value = DIRECTORY_V1.to_json()
mock_client().request_issuance.return_value = self.certr
deadline = deadline = datetime.datetime.now() - datetime.timedelta(seconds=60)
client = self._init()
self.assertRaises(errors.TimeoutError, client.finalize_order,
self.orderr, deadline)
def test_finalize_order_v2(self):
self.response.json.return_value = DIRECTORY_V2.to_json()
mock_orderr = mock.MagicMock()
mock_deadline = mock.MagicMock()
with mock.patch('acme.client.ClientV2') as mock_client:
client = self._init()
client.finalize_order(mock_orderr, mock_deadline)
mock_client().finalize_order.assert_called_once_with(mock_orderr, mock_deadline, False)
def test_revoke(self):
self.response.json.return_value = DIRECTORY_V1.to_json()
with mock.patch('acme.client.Client') as mock_client:
client = self._init()
client.revoke(messages_test.CERT, self.rsn)
mock_client().revoke.assert_called_once_with(messages_test.CERT, self.rsn)
self.response.json.return_value = DIRECTORY_V2.to_json()
with mock.patch('acme.client.ClientV2') as mock_client:
client = self._init()
client.revoke(messages_test.CERT, self.rsn)
mock_client().revoke.assert_called_once_with(messages_test.CERT, self.rsn)
def test_update_registration(self):
self.response.json.return_value = DIRECTORY_V1.to_json()
with mock.patch('acme.client.Client') as mock_client:
client = self._init()
client.update_registration(mock.sentinel.regr, None)
mock_client().update_registration.assert_called_once_with(mock.sentinel.regr, None)
# newNonce present means it will pick acme_version 2
def test_external_account_required_true(self):
self.response.json.return_value = messages.Directory({
'newNonce': 'http://letsencrypt-test.com/acme/new-nonce',
'meta': messages.Directory.Meta(external_account_required=True),
}).to_json()
client = self._init()
self.assertTrue(client.external_account_required())
# newNonce present means it will pick acme_version 2
def test_external_account_required_false(self):
self.response.json.return_value = messages.Directory({
'newNonce': 'http://letsencrypt-test.com/acme/new-nonce',
'meta': messages.Directory.Meta(external_account_required=False),
}).to_json()
client = self._init()
self.assertFalse(client.external_account_required())
def test_external_account_required_false_v1(self):
self.response.json.return_value = messages.Directory({
'meta': messages.Directory.Meta(external_account_required=False),
}).to_json()
client = self._init()
self.assertFalse(client.external_account_required())
class ClientTest(ClientTestBase):
"""Tests for acme.client.Client."""
def setUp(self):
super().setUp()
self.directory = DIRECTORY_V1
# Registration
self.regr = self.regr.update(
terms_of_service='https://www.letsencrypt-demo.org/tos')
# Request issuance
self.certr = messages.CertificateResource(
body=messages_test.CERT, authzrs=(self.authzr,),
uri='https://www.letsencrypt-demo.org/acme/cert/1',
cert_chain_uri='https://www.letsencrypt-demo.org/ca')
from acme.client import Client
self.client = Client(
directory=self.directory, key=KEY, alg=jose.RS256, net=self.net)
def test_init_downloads_directory(self):
uri = 'http://www.letsencrypt-demo.org/directory'
from acme.client import Client
self.client = Client(
directory=uri, key=KEY, alg=jose.RS256, net=self.net)
self.net.get.assert_called_once_with(uri)
@mock.patch('acme.client.ClientNetwork')
def test_init_without_net(self, mock_net):
mock_net.return_value = mock.sentinel.net
alg = jose.RS256
from acme.client import Client
self.client = Client(
directory=self.directory, key=KEY, alg=alg)
mock_net.called_once_with(KEY, alg=alg, verify_ssl=True)
self.assertEqual(self.client.net, mock.sentinel.net)
def test_register(self):
# "Instance of 'Field' has no to_json/update member" bug:
self.response.status_code = http_client.CREATED
self.response.json.return_value = self.regr.body.to_json()
self.response.headers['Location'] = self.regr.uri
self.response.links.update({
'terms-of-service': {'url': self.regr.terms_of_service},
})
self.assertEqual(self.regr, self.client.register(self.new_reg))
# TODO: test POST call arguments
def test_update_registration(self):
# "Instance of 'Field' has no to_json/update member" bug:
self.response.headers['Location'] = self.regr.uri
self.response.json.return_value = self.regr.body.to_json()
self.assertEqual(self.regr, self.client.update_registration(self.regr))
# TODO: test POST call arguments
# TODO: split here and separate test
self.response.json.return_value = self.regr.body.update(
contact=()).to_json()
def test_deactivate_account(self):
self.response.headers['Location'] = self.regr.uri
self.response.json.return_value = self.regr.body.to_json()
self.assertEqual(self.regr,
self.client.deactivate_registration(self.regr))
def test_query_registration(self):
self.response.json.return_value = self.regr.body.to_json()
self.assertEqual(self.regr, self.client.query_registration(self.regr))
def test_agree_to_tos(self):
self.client.update_registration = mock.Mock()
self.client.agree_to_tos(self.regr)
regr = self.client.update_registration.call_args[0][0]
self.assertEqual(self.regr.terms_of_service, regr.body.agreement)
def _prepare_response_for_request_challenges(self):
self.response.status_code = http_client.CREATED
self.response.headers['Location'] = self.authzr.uri
self.response.json.return_value = self.authz.to_json()
def test_request_challenges(self):
self._prepare_response_for_request_challenges()
self.client.request_challenges(self.identifier)
self.net.post.assert_called_once_with(
self.directory.new_authz,
messages.NewAuthorization(identifier=self.identifier),
acme_version=1)
def test_request_challenges_deprecated_arg(self):
self._prepare_response_for_request_challenges()
self.client.request_challenges(self.identifier, new_authzr_uri="hi")
self.net.post.assert_called_once_with(
self.directory.new_authz,
messages.NewAuthorization(identifier=self.identifier),
acme_version=1)
def test_request_challenges_custom_uri(self):
self._prepare_response_for_request_challenges()
self.client.request_challenges(self.identifier)
self.net.post.assert_called_once_with(
'https://www.letsencrypt-demo.org/acme/new-authz', mock.ANY,
acme_version=1)
def test_request_challenges_unexpected_update(self):
self._prepare_response_for_request_challenges()
self.response.json.return_value = self.authz.update(
identifier=self.identifier.update(value='foo')).to_json()
self.assertRaises(
errors.UnexpectedUpdate, self.client.request_challenges,
self.identifier)
def test_request_challenges_wildcard(self):
wildcard_identifier = messages.Identifier(
typ=messages.IDENTIFIER_FQDN, value='*.example.org')
self.assertRaises(
errors.WildcardUnsupportedError, self.client.request_challenges,
wildcard_identifier)
def test_request_domain_challenges(self):
self.client.request_challenges = mock.MagicMock()
self.assertEqual(
self.client.request_challenges(self.identifier),
self.client.request_domain_challenges('example.com'))
def test_answer_challenge(self):
self.response.links['up'] = {'url': self.challr.authzr_uri}
self.response.json.return_value = self.challr.body.to_json()
chall_response = challenges.DNSResponse(validation=None)
self.client.answer_challenge(self.challr.body, chall_response)
# TODO: split here and separate test
self.assertRaises(errors.UnexpectedUpdate, self.client.answer_challenge,
self.challr.body.update(uri='foo'), chall_response)
def test_answer_challenge_missing_next(self):
self.assertRaises(
errors.ClientError, self.client.answer_challenge,
self.challr.body, challenges.DNSResponse(validation=None))
def test_retry_after_date(self):
self.response.headers['Retry-After'] = 'Fri, 31 Dec 1999 23:59:59 GMT'
self.assertEqual(
datetime.datetime(1999, 12, 31, 23, 59, 59),
self.client.retry_after(response=self.response, default=10))
@mock.patch('acme.client.datetime')
def test_retry_after_invalid(self, dt_mock):
dt_mock.datetime.now.return_value = datetime.datetime(2015, 3, 27)
dt_mock.timedelta = datetime.timedelta
self.response.headers['Retry-After'] = 'foooo'
self.assertEqual(
datetime.datetime(2015, 3, 27, 0, 0, 10),
self.client.retry_after(response=self.response, default=10))
@mock.patch('acme.client.datetime')
def test_retry_after_overflow(self, dt_mock):
dt_mock.datetime.now.return_value = datetime.datetime(2015, 3, 27)
dt_mock.timedelta = datetime.timedelta
dt_mock.datetime.side_effect = datetime.datetime
self.response.headers['Retry-After'] = "Tue, 116 Feb 2016 11:50:00 MST"
self.assertEqual(
datetime.datetime(2015, 3, 27, 0, 0, 10),
self.client.retry_after(response=self.response, default=10))
@mock.patch('acme.client.datetime')
def test_retry_after_seconds(self, dt_mock):
dt_mock.datetime.now.return_value = datetime.datetime(2015, 3, 27)
dt_mock.timedelta = datetime.timedelta
self.response.headers['Retry-After'] = '50'
self.assertEqual(
datetime.datetime(2015, 3, 27, 0, 0, 50),
self.client.retry_after(response=self.response, default=10))
@mock.patch('acme.client.datetime')
def test_retry_after_missing(self, dt_mock):
dt_mock.datetime.now.return_value = datetime.datetime(2015, 3, 27)
dt_mock.timedelta = datetime.timedelta
self.assertEqual(
datetime.datetime(2015, 3, 27, 0, 0, 10),
self.client.retry_after(response=self.response, default=10))
def test_poll(self):
self.response.json.return_value = self.authzr.body.to_json()
self.assertEqual((self.authzr, self.response),
self.client.poll(self.authzr))
# TODO: split here and separate test
self.response.json.return_value = self.authz.update(
identifier=self.identifier.update(value='foo')).to_json()
self.assertRaises(
errors.UnexpectedUpdate, self.client.poll, self.authzr)
def test_request_issuance(self):
self.response.content = CERT_DER
self.response.headers['Location'] = self.certr.uri
self.response.links['up'] = {'url': self.certr.cert_chain_uri}
self.assertEqual(self.certr, self.client.request_issuance(
messages_test.CSR, (self.authzr,)))
# TODO: check POST args
def test_request_issuance_missing_up(self):
self.response.content = CERT_DER
self.response.headers['Location'] = self.certr.uri
self.assertEqual(
self.certr.update(cert_chain_uri=None),
self.client.request_issuance(messages_test.CSR, (self.authzr,)))
def test_request_issuance_missing_location(self):
self.assertRaises(
errors.ClientError, self.client.request_issuance,
messages_test.CSR, (self.authzr,))
@mock.patch('acme.client.datetime')
@mock.patch('acme.client.time')
def test_poll_and_request_issuance(self, time_mock, dt_mock):
# clock.dt | pylint: disable=no-member
clock = mock.MagicMock(dt=datetime.datetime(2015, 3, 27))
def sleep(seconds):
"""increment clock"""
clock.dt += datetime.timedelta(seconds=seconds)
time_mock.sleep.side_effect = sleep
def now():
"""return current clock value"""
return clock.dt
dt_mock.datetime.now.side_effect = now
dt_mock.timedelta = datetime.timedelta
def poll(authzr): # pylint: disable=missing-docstring
# record poll start time based on the current clock value
authzr.times.append(clock.dt)
# suppose it takes 2 seconds for server to produce the
# result, increment clock
clock.dt += datetime.timedelta(seconds=2)
if len(authzr.retries) == 1: # no more retries
done = mock.MagicMock(uri=authzr.uri, times=authzr.times)
done.body.status = authzr.retries[0]
return done, []
# response (2nd result tuple element) is reduced to only
# Retry-After header contents represented as integer
# seconds; authzr.retries is a list of Retry-After
# headers, head(retries) is peeled of as a current
# Retry-After header, and tail(retries) is persisted for
# later poll() calls
return (mock.MagicMock(retries=authzr.retries[1:],
uri=authzr.uri + '.', times=authzr.times),
authzr.retries[0])
self.client.poll = mock.MagicMock(side_effect=poll)
mintime = 7
def retry_after(response, default):
# pylint: disable=missing-docstring
# check that poll_and_request_issuance correctly passes mintime
self.assertEqual(default, mintime)
return clock.dt + datetime.timedelta(seconds=response)
self.client.retry_after = mock.MagicMock(side_effect=retry_after)
def request_issuance(csr, authzrs): # pylint: disable=missing-docstring
return csr, authzrs
self.client.request_issuance = mock.MagicMock(
side_effect=request_issuance)
csr = mock.MagicMock()
authzrs = (
mock.MagicMock(uri='a', times=[], retries=(
8, 20, 30, messages.STATUS_VALID)),
mock.MagicMock(uri='b', times=[], retries=(
5, messages.STATUS_VALID)),
)
cert, updated_authzrs = self.client.poll_and_request_issuance(
csr, authzrs, mintime=mintime,
# make sure that max_attempts is per-authorization, rather
# than global
max_attempts=max(len(authzrs[0].retries), len(authzrs[1].retries)))
self.assertIs(cert[0], csr)
self.assertIs(cert[1], updated_authzrs)
self.assertEqual(updated_authzrs[0].uri, 'a...')
self.assertEqual(updated_authzrs[1].uri, 'b.')
self.assertEqual(updated_authzrs[0].times, [
datetime.datetime(2015, 3, 27),
# a is scheduled for 10, but b is polling [9..11), so it
# will be picked up as soon as b is finished, without
# additional sleeping
datetime.datetime(2015, 3, 27, 0, 0, 11),
datetime.datetime(2015, 3, 27, 0, 0, 33),
datetime.datetime(2015, 3, 27, 0, 1, 5),
])
self.assertEqual(updated_authzrs[1].times, [
datetime.datetime(2015, 3, 27, 0, 0, 2),
datetime.datetime(2015, 3, 27, 0, 0, 9),
])
self.assertEqual(clock.dt, datetime.datetime(2015, 3, 27, 0, 1, 7))
# CA sets invalid | TODO: move to a separate test
invalid_authzr = mock.MagicMock(
times=[], retries=[messages.STATUS_INVALID])
self.assertRaises(
errors.PollError, self.client.poll_and_request_issuance,
csr, authzrs=(invalid_authzr,), mintime=mintime)
# exceeded max_attempts | TODO: move to a separate test
self.assertRaises(
errors.PollError, self.client.poll_and_request_issuance,
csr, authzrs, mintime=mintime, max_attempts=2)
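The mocks above encode the contract this test checks: each authorization is re-polled no earlier than the later of the server's Retry-After time and `mintime` seconds, and `max_attempts` is counted per authorization rather than globally. A loose sketch of that kind of loop, with illustrative names and a simplified termination convention (not the actual acme.client implementation):

import datetime
import time

def poll_until_final(poll, retry_after, authzr, mintime=7, max_attempts=4):
    """Illustrative polling loop: re-poll one authorization until it reaches
    a final status, waiting until the later of the server's Retry-After time
    and `mintime` seconds from now between attempts."""
    for _ in range(max_attempts):
        authzr, response = poll(authzr)
        if response is None:  # terminal status reached, nothing left to wait for
            return authzr
        wake_up_at = retry_after(response, default=mintime)
        delay = (wake_up_at - datetime.datetime.now()).total_seconds()
        if delay > 0:
            time.sleep(delay)
    raise RuntimeError("authorization still pending after max_attempts polls")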
def test_deactivate_authorization(self):
authzb = self.authzr.body.update(status=messages.STATUS_DEACTIVATED)
self.response.json.return_value = authzb.to_json()
authzr = self.client.deactivate_authorization(self.authzr)
self.assertEqual(authzb, authzr.body)
self.assertEqual(self.client.net.post.call_count, 1)
self.assertIn(self.authzr.uri, self.net.post.call_args_list[0][0])
def test_check_cert(self):
self.response.headers['Location'] = self.certr.uri
self.response.content = CERT_DER
self.assertEqual(self.certr.update(body=messages_test.CERT),
self.client.check_cert(self.certr))
# TODO: split here and separate test
self.response.headers['Location'] = 'foo'
self.assertRaises(
errors.UnexpectedUpdate, self.client.check_cert, self.certr)
def test_check_cert_missing_location(self):
self.response.content = CERT_DER
self.assertRaises(
errors.ClientError, self.client.check_cert, self.certr)
def test_refresh(self):
self.client.check_cert = mock.MagicMock()
self.assertEqual(
self.client.check_cert(self.certr), self.client.refresh(self.certr))
def test_fetch_chain_no_up_link(self):
self.assertEqual([], self.client.fetch_chain(self.certr.update(
cert_chain_uri=None)))
def test_fetch_chain_single(self):
# pylint: disable=protected-access
self.client._get_cert = mock.MagicMock()
self.client._get_cert.return_value = (
mock.MagicMock(links={}), "certificate")
self.assertEqual([self.client._get_cert(self.certr.cert_chain_uri)[1]],
self.client.fetch_chain(self.certr))
def test_fetch_chain_max(self):
# pylint: disable=protected-access
up_response = mock.MagicMock(links={'up': {'url': 'http://cert'}})
noup_response = mock.MagicMock(links={})
self.client._get_cert = mock.MagicMock()
self.client._get_cert.side_effect = [
(up_response, "cert")] * 9 + [(noup_response, "last_cert")]
chain = self.client.fetch_chain(self.certr, max_length=10)
self.assertEqual(chain, ["cert"] * 9 + ["last_cert"])
def test_fetch_chain_too_many(self): # recursive
# pylint: disable=protected-access
response = mock.MagicMock(links={'up': {'url': 'http://cert'}})
self.client._get_cert = mock.MagicMock()
self.client._get_cert.return_value = (response, "certificate")
self.assertRaises(errors.Error, self.client.fetch_chain, self.certr)
def test_revoke(self):
self.client.revoke(self.certr.body, self.rsn)
self.net.post.assert_called_once_with(
self.directory[messages.Revocation], mock.ANY, acme_version=1)
def test_revocation_payload(self):
obj = messages.Revocation(certificate=self.certr.body, reason=self.rsn)
self.assertIn('reason', obj.to_partial_json().keys())
self.assertEqual(self.rsn, obj.to_partial_json()['reason'])
def test_revoke_bad_status_raises_error(self):
self.response.status_code = http_client.METHOD_NOT_ALLOWED
self.assertRaises(
errors.ClientError,
self.client.revoke,
self.certr,
self.rsn)
class ClientV2Test(ClientTestBase):
"""Tests for acme.client.ClientV2."""
def setUp(self):
super().setUp()
self.directory = DIRECTORY_V2
from acme.client import ClientV2
self.client = ClientV2(self.directory, self.net)
self.new_reg = self.new_reg.update(terms_of_service_agreed=True)
@@ -752,11 +103,40 @@ class ClientV2Test(ClientTestBase):
self.assertEqual(self.regr, self.client.new_account(self.new_reg))
def test_new_account_tos_link(self):
self.response.status_code = http_client.CREATED
self.response.json.return_value = self.regr.body.to_json()
self.response.headers['Location'] = self.regr.uri
self.response.links.update({
'terms-of-service': {'url': 'https://www.letsencrypt-demo.org/tos'},
})
self.assertEqual(self.client.new_account(self.new_reg).terms_of_service,
'https://www.letsencrypt-demo.org/tos')
def test_new_account_conflict(self):
self.response.status_code = http_client.OK
self.response.headers['Location'] = self.regr.uri
self.assertRaises(errors.ConflictError, self.client.new_account, self.new_reg)
def test_deactivate_account(self):
deactivated_regr = self.regr.update(
body=self.regr.body.update(status='deactivated'))
self.response.json.return_value = deactivated_regr.body.to_json()
self.response.status_code = http_client.OK
self.response.headers['Location'] = self.regr.uri
self.assertEqual(self.client.deactivate_registration(self.regr), deactivated_regr)
def test_deactivate_authorization(self):
deactivated_authz = self.authzr.update(
body=self.authzr.body.update(status=messages.STATUS_DEACTIVATED))
self.response.json.return_value = deactivated_authz.body.to_json()
authzr = self.client.deactivate_authorization(self.authzr)
self.assertEqual(deactivated_authz.body, authzr.body)
self.assertEqual(self.client.net.post.call_count, 1)
self.assertIn(self.authzr.uri, self.net.post.call_args_list[0][0])
def test_new_order(self):
order_response = copy.deepcopy(self.response)
order_response.status_code = http_client.CREATED
@@ -775,6 +155,20 @@ class ClientV2Test(ClientTestBase):
mock_post_as_get.side_effect = (authz_response, authz_response2)
self.assertEqual(self.client.new_order(CSR_MIXED_PEM), self.orderr)
def test_answer_challenge(self):
self.response.links['up'] = {'url': self.challr.authzr_uri}
self.response.json.return_value = self.challr.body.to_json()
chall_response = challenges.DNSResponse(validation=None)
self.client.answer_challenge(self.challr.body, chall_response)
self.assertRaises(errors.UnexpectedUpdate, self.client.answer_challenge,
self.challr.body.update(uri='foo'), chall_response)
def test_answer_challenge_missing_next(self):
self.assertRaises(
errors.ClientError, self.client.answer_challenge,
self.challr.body, challenges.DNSResponse(validation=None))
@mock.patch('acme.client.datetime')
def test_poll_and_finalize(self, mock_datetime):
mock_datetime.datetime.now.return_value = datetime.datetime(2018, 2, 15)
@@ -821,6 +215,11 @@ class ClientV2Test(ClientTestBase):
self.authz.to_json(), self.authz2.to_json(), updated_authz2.to_json())
self.assertEqual(self.client.poll_authorizations(self.orderr, deadline), updated_orderr)
def test_poll_unexpected_update(self):
updated_authz = self.authz.update(identifier=self.identifier.update(value='foo'))
self.response.json.return_value = updated_authz.to_json()
self.assertRaises(errors.UnexpectedUpdate, self.client.poll, self.authzr)
def test_finalize_order_success(self):
updated_order = self.order.update(
certificate='https://www.letsencrypt-demo.org/acme/cert/',
@@ -872,9 +271,9 @@ class ClientV2Test(ClientTestBase):
deadline = datetime.datetime(9999, 9, 9)
resp = self.client.finalize_order(self.orderr, deadline, fetch_alternative_chains=True)
self.net.post.assert_any_call('https://example.com/acme/cert/1',
mock.ANY, acme_version=2, new_nonce_url=mock.ANY)
mock.ANY, new_nonce_url=mock.ANY)
self.net.post.assert_any_call('https://example.com/acme/cert/2',
mock.ANY, acme_version=2, new_nonce_url=mock.ANY)
mock.ANY, new_nonce_url=mock.ANY)
self.assertEqual(resp, updated_orderr)
del self.response.headers['Link']
@@ -884,8 +283,15 @@ class ClientV2Test(ClientTestBase):
def test_revoke(self):
self.client.revoke(messages_test.CERT, self.rsn)
self.net.post.assert_called_once_with(
self.directory["revokeCert"], mock.ANY, acme_version=2,
new_nonce_url=DIRECTORY_V2['newNonce'])
self.directory["revokeCert"], mock.ANY, new_nonce_url=DIRECTORY_V2['newNonce'])
def test_revoke_bad_status_raises_error(self):
self.response.status_code = http_client.METHOD_NOT_ALLOWED
self.assertRaises(
errors.ClientError,
self.client.revoke,
messages_test.CERT,
self.rsn)
def test_update_registration(self):
# "Instance of 'Field' has no to_json/update member" bug:
@@ -916,6 +322,11 @@ class ClientV2Test(ClientTestBase):
def test_external_account_required_default(self):
self.assertFalse(self.client.external_account_required())
def test_query_registration_client(self):
self.response.json.return_value = self.regr.body.to_json()
self.response.headers['Location'] = 'https://www.letsencrypt-demo.org/acme/reg/1'
self.assertEqual(self.regr, self.client.query_registration(self.regr))
def test_post_as_get(self):
with mock.patch('acme.client.ClientV2._authzr_from_response') as mock_client:
mock_client.return_value = self.authzr2
@@ -923,12 +334,64 @@ class ClientV2Test(ClientTestBase):
self.client.poll(self.authzr2) # pylint: disable=protected-access
self.client.net.post.assert_called_once_with(
self.authzr2.uri, None, acme_version=2,
self.authzr2.uri, None,
new_nonce_url='https://www.letsencrypt-demo.org/acme/new-nonce')
self.client.net.get.assert_not_called()
def test_retry_after_date(self):
self.response.headers['Retry-After'] = 'Fri, 31 Dec 1999 23:59:59 GMT'
self.assertEqual(
datetime.datetime(1999, 12, 31, 23, 59, 59),
self.client.retry_after(response=self.response, default=10))
class MockJSONDeSerializable(VersionedLEACMEMixin, jose.JSONDeSerializable):
@mock.patch('acme.client.datetime')
def test_retry_after_invalid(self, dt_mock):
dt_mock.datetime.now.return_value = datetime.datetime(2015, 3, 27)
dt_mock.timedelta = datetime.timedelta
self.response.headers['Retry-After'] = 'foooo'
self.assertEqual(
datetime.datetime(2015, 3, 27, 0, 0, 10),
self.client.retry_after(response=self.response, default=10))
@mock.patch('acme.client.datetime')
def test_retry_after_overflow(self, dt_mock):
dt_mock.datetime.now.return_value = datetime.datetime(2015, 3, 27)
dt_mock.timedelta = datetime.timedelta
dt_mock.datetime.side_effect = datetime.datetime
self.response.headers['Retry-After'] = "Tue, 116 Feb 2016 11:50:00 MST"
self.assertEqual(
datetime.datetime(2015, 3, 27, 0, 0, 10),
self.client.retry_after(response=self.response, default=10))
@mock.patch('acme.client.datetime')
def test_retry_after_seconds(self, dt_mock):
dt_mock.datetime.now.return_value = datetime.datetime(2015, 3, 27)
dt_mock.timedelta = datetime.timedelta
self.response.headers['Retry-After'] = '50'
self.assertEqual(
datetime.datetime(2015, 3, 27, 0, 0, 50),
self.client.retry_after(response=self.response, default=10))
@mock.patch('acme.client.datetime')
def test_retry_after_missing(self, dt_mock):
dt_mock.datetime.now.return_value = datetime.datetime(2015, 3, 27)
dt_mock.timedelta = datetime.timedelta
self.assertEqual(
datetime.datetime(2015, 3, 27, 0, 0, 10),
self.client.retry_after(response=self.response, default=10))
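The four tests above cover the two forms RFC 7231 allows for Retry-After (an HTTP-date or delta-seconds), plus the unparseable and missing cases, all of which must fall back to the caller-supplied default. A minimal sketch of such parsing, written independently of the actual acme.client code and only as an illustration:

import datetime
from email.utils import mktime_tz, parsedate_tz

def retry_after(header_value, default, now=None):
    """Illustrative Retry-After parser returning an absolute datetime."""
    now = now or datetime.datetime.now()
    fallback = now + datetime.timedelta(seconds=default)
    if not header_value:
        return fallback
    if header_value.isdigit():           # delta-seconds form, e.g. "50"
        return now + datetime.timedelta(seconds=int(header_value))
    parsed = parsedate_tz(header_value)  # HTTP-date form
    if parsed is None:                   # e.g. "foooo"
        return fallback
    try:
        return datetime.datetime.utcfromtimestamp(mktime_tz(parsed))
    except (OverflowError, OSError, ValueError):
        # Guard against dates the platform cannot represent.
        return fallback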
def test_get_directory(self):
self.response.json.return_value = DIRECTORY_V2.to_json()
self.assertEqual(
DIRECTORY_V2.to_partial_json(),
ClientV2.get_directory('https://example.com/dir', self.net).to_partial_json())
class MockJSONDeSerializable(jose.JSONDeSerializable):
# pylint: disable=missing-docstring
def __init__(self, value):
self.value = value
@@ -948,7 +411,6 @@ class ClientNetworkTest(unittest.TestCase):
self.verify_ssl = mock.MagicMock()
self.wrap_in_jws = mock.MagicMock(return_value=mock.sentinel.wrapped)
from acme.client import ClientNetwork
self.net = ClientNetwork(
key=KEY, alg=jose.RS256, verify_ssl=self.verify_ssl,
user_agent='acme-python-test')
@@ -963,8 +425,7 @@ class ClientNetworkTest(unittest.TestCase):
def test_wrap_in_jws(self):
# pylint: disable=protected-access
jws_dump = self.net._wrap_in_jws(
MockJSONDeSerializable('foo'), nonce=b'Tg', url="url",
acme_version=1)
MockJSONDeSerializable('foo'), nonce=b'Tg', url="url")
jws = acme_jws.JWS.json_loads(jws_dump)
self.assertEqual(json.loads(jws.payload.decode()), {'foo': 'foo'})
self.assertEqual(jws.signature.combined.nonce, b'Tg')
@@ -973,8 +434,7 @@ class ClientNetworkTest(unittest.TestCase):
self.net.account = {'uri': 'acct-uri'}
# pylint: disable=protected-access
jws_dump = self.net._wrap_in_jws(
MockJSONDeSerializable('foo'), nonce=b'Tg', url="url",
acme_version=2)
MockJSONDeSerializable('foo'), nonce=b'Tg', url="url")
jws = acme_jws.JWS.json_loads(jws_dump)
self.assertEqual(json.loads(jws.payload.decode()), {'foo': 'foo'})
self.assertEqual(jws.signature.combined.nonce, b'Tg')
@@ -1080,14 +540,13 @@ class ClientNetworkTest(unittest.TestCase):
self.net.session = mock.MagicMock()
self.net.session.request.return_value = mock.MagicMock(
ok=True, status_code=http_client.OK,
headers={"Content-Type": "application/pkix-cert"},
content=b"hi")
# pylint: disable=protected-access
self.net._send_request('HEAD', 'http://example.com/', 'foo',
timeout=mock.ANY, bar='baz')
timeout=mock.ANY, bar='baz', headers={'Accept': 'application/pkix-cert'})
mock_logger.debug.assert_called_with(
'Received response:\nHTTP %d\n%s\n\n%s', 200,
'Content-Type: application/pkix-cert', b'aGk=')
'', b'aGk=')
def test_send_request_post(self):
self.net.session = mock.MagicMock()
@@ -1179,7 +638,6 @@ class ClientNetworkWithMockedResponseTest(unittest.TestCase):
"""Tests for acme.client.ClientNetwork which mock out response."""
def setUp(self):
from acme.client import ClientNetwork
self.net = ClientNetwork(key=None, alg=None)
self.response = mock.MagicMock(ok=True, status_code=http_client.OK)
@@ -1260,13 +718,13 @@ class ClientNetworkWithMockedResponseTest(unittest.TestCase):
'uri', self.obj, content_type=self.content_type))
self.assertTrue(self.response.checked)
self.net._wrap_in_jws.assert_called_once_with(
self.obj, jose.b64decode(self.all_nonces.pop()), "uri", 1)
self.obj, jose.b64decode(self.all_nonces.pop()), "uri")
self.available_nonces = []
self.assertRaises(errors.MissingNonce, self.net.post,
'uri', self.obj, content_type=self.content_type)
self.net._wrap_in_jws.assert_called_with(
self.obj, jose.b64decode(self.all_nonces.pop()), "uri", 1)
self.obj, jose.b64decode(self.all_nonces.pop()), "uri")
def test_post_wrong_initial_nonce(self): # HEAD
self.available_nonces = [b'f', jose.b64encode(b'good')]
@@ -1324,41 +782,14 @@ class ClientNetworkWithMockedResponseTest(unittest.TestCase):
check_response = mock.MagicMock()
self.net._check_response = check_response
self.assertRaises(errors.ClientError, self.net.post, 'uri',
self.obj, content_type=self.content_type, acme_version=2,
self.obj, content_type=self.content_type,
new_nonce_url='new_nonce_uri')
self.assertEqual(check_response.call_count, 1)
def test_new_nonce_uri_removed(self):
self.content_type = None
self.net.post('uri', self.obj, content_type=None,
acme_version=2, new_nonce_url='new_nonce_uri')
self.net.post('uri', self.obj, content_type=None, new_nonce_url='new_nonce_uri')
class ClientNetworkSourceAddressBindingTest(unittest.TestCase):
"""Tests that if ClientNetwork has a source IP set manually, the underlying library has
used the provided source address."""
def setUp(self):
self.source_address = "8.8.8.8"
def test_source_address_set(self):
from acme.client import ClientNetwork
net = ClientNetwork(key=None, alg=None, source_address=self.source_address)
for adapter in net.session.adapters.values():
self.assertIn(self.source_address, adapter.source_address)
def test_behavior_assumption(self):
"""This is a test that guardrails the HTTPAdapter behavior so that if the default for
a Session() changes, the assumptions here aren't violated silently."""
from acme.client import ClientNetwork
# Source address not specified, so the default adapter type should be bound -- this
# test should fail if the default adapter type is changed by requests
net = ClientNetwork(key=None, alg=None)
session = requests.Session()
for scheme in session.adapters:
client_network_adapter = net.session.adapters.get(scheme)
default_adapter = session.adapters.get(scheme)
self.assertEqual(client_network_adapter.__class__, default_adapter.__class__)
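For context on what these assertions exercise: binding a requests Session to a local source address is typically done with a custom HTTPAdapter that threads the address through to urllib3's connection pool. A minimal sketch of that general pattern follows; the class name and the documentation IP below are illustrative and are not Certbot's actual adapter:

import requests
from requests.adapters import HTTPAdapter

class SourceAddressAdapter(HTTPAdapter):
    """Bind all outgoing connections to a fixed local source address."""
    def __init__(self, source_address, **kwargs):
        # Port 0 lets the OS pick any free local port.
        self._source_address = (source_address, 0)
        super().__init__(**kwargs)

    def init_poolmanager(self, *args, **kwargs):
        # Forwarded by urllib3 down to the underlying HTTPConnection.
        kwargs["source_address"] = self._source_address
        return super().init_poolmanager(*args, **kwargs)

session = requests.Session()
session.mount("http://", SourceAddressAdapter("192.0.2.10"))
session.mount("https://", SourceAddressAdapter("192.0.2.10"))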
if __name__ == '__main__':
unittest.main() # pragma: no cover

View File

@@ -1,6 +1,7 @@
"""Tests for acme.fields."""
import datetime
import unittest
import warnings
import josepy as jose
import pytz
@@ -54,19 +55,5 @@ class RFC3339FieldTest(unittest.TestCase):
jose.DeserializationError, RFC3339Field.default_decoder, '')
class ResourceTest(unittest.TestCase):
"""Tests for acme.fields.Resource."""
def setUp(self):
from acme.fields import Resource
self.field = Resource('x')
def test_decode_good(self):
self.assertEqual('x', self.field.decode('x'))
def test_decode_wrong(self):
self.assertRaises(jose.DeserializationError, self.field.decode, 'y')
if __name__ == '__main__':
unittest.main() # pragma: no cover

View File

@@ -1,30 +0,0 @@
"""Tests for acme.magic_typing."""
import sys
import unittest
import warnings
from unittest import mock
class MagicTypingTest(unittest.TestCase):
"""Tests for acme.magic_typing."""
def test_import_success(self):
try:
import typing as temp_typing
except ImportError: # pragma: no cover
temp_typing = None # pragma: no cover
typing_class_mock = mock.MagicMock()
text_mock = mock.MagicMock()
typing_class_mock.Text = text_mock
sys.modules['typing'] = typing_class_mock
if 'acme.magic_typing' in sys.modules:
del sys.modules['acme.magic_typing'] # pragma: no cover
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=DeprecationWarning)
from acme.magic_typing import Text
self.assertEqual(Text, text_mock)
del sys.modules['acme.magic_typing']
sys.modules['typing'] = temp_typing
if __name__ == '__main__':
unittest.main() # pragma: no cover

View File

@@ -2,6 +2,7 @@
from typing import Dict
import unittest
from unittest import mock
import warnings
import josepy as jose
@@ -134,8 +135,8 @@ class DirectoryTest(unittest.TestCase):
def setUp(self):
from acme.messages import Directory
self.dir = Directory({
'new-reg': 'reg',
mock.MagicMock(resource_type='new-cert'): 'cert',
'newReg': 'reg',
'newCert': 'cert',
'meta': Directory.Meta(
terms_of_service='https://example.com/acme/terms',
website='https://www.example.com/',
@@ -148,26 +149,23 @@ class DirectoryTest(unittest.TestCase):
Directory({'foo': 'bar'})
def test_getitem(self):
self.assertEqual('reg', self.dir['new-reg'])
from acme.messages import NewRegistration
self.assertEqual('reg', self.dir[NewRegistration])
self.assertEqual('reg', self.dir[NewRegistration()])
self.assertEqual('reg', self.dir['newReg'])
def test_getitem_fails_with_key_error(self):
self.assertRaises(KeyError, self.dir.__getitem__, 'foo')
def test_getattr(self):
self.assertEqual('reg', self.dir.new_reg)
self.assertEqual('reg', self.dir.newReg)
def test_getattr_fails_with_attribute_error(self):
self.assertRaises(AttributeError, self.dir.__getattr__, 'foo')
def test_to_json(self):
self.assertEqual(self.dir.to_json(), {
'new-reg': 'reg',
'new-cert': 'cert',
'newReg': 'reg',
'newCert': 'cert',
'meta': {
'terms-of-service': 'https://example.com/acme/terms',
'termsOfService': 'https://example.com/acme/terms',
'website': 'https://www.example.com/',
'caaIdentities': ['example.com'],
},
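These fixtures reflect the ACME v2 directory shape, where keys are camelCase ('newReg'/'newCert' here, and 'termsOfService' under 'meta') rather than the old dashed names. A small usage sketch of the access patterns exercised above, assuming acme.messages.Directory behaves as these tests show:

from acme import messages

directory = messages.Directory({"newReg": "reg", "newCert": "cert"})
# Item-style and attribute-style access resolve the same camelCase key.
assert directory["newReg"] == "reg"
assert directory.newReg == "reg"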
@@ -287,7 +285,7 @@ class UpdateRegistrationTest(unittest.TestCase):
def test_empty(self):
from acme.messages import UpdateRegistration
jstring = '{"resource": "reg"}'
self.assertEqual(jstring, UpdateRegistration().json_dumps())
self.assertEqual('{}', UpdateRegistration().json_dumps())
self.assertEqual(
UpdateRegistration(), UpdateRegistration.json_loads(jstring))
@@ -335,7 +333,7 @@ class ChallengeBodyTest(unittest.TestCase):
error=error)
self.jobj_to = {
'uri': 'http://challb',
'url': 'http://challb',
'status': self.status,
'type': 'dns',
'token': 'evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA',
@@ -382,20 +380,17 @@ class AuthorizationTest(unittest.TestCase):
chall=challenges.DNS(
token=b'DGyRejmCefe7v4NfDGDKfA')),
)
combinations = ((0,), (1,))
from acme.messages import Authorization
from acme.messages import Identifier
from acme.messages import IDENTIFIER_FQDN
identifier = Identifier(typ=IDENTIFIER_FQDN, value='example.com')
self.authz = Authorization(
identifier=identifier, combinations=combinations,
challenges=self.challbs)
identifier=identifier, challenges=self.challbs)
self.jobj_from = {
'identifier': identifier.to_json(),
'challenges': [challb.to_json() for challb in self.challbs],
'combinations': combinations,
}
def test_from_json(self):
@@ -406,12 +401,6 @@ class AuthorizationTest(unittest.TestCase):
from acme.messages import Authorization
hash(Authorization.from_json(self.jobj_from))
def test_resolved_combinations(self):
self.assertEqual(self.authz.resolved_combinations, (
(self.challbs[0],),
(self.challbs[1],),
))
class AuthorizationResourceTest(unittest.TestCase):
"""Tests for acme.messages.AuthorizationResource."""
@@ -502,7 +491,6 @@ class JWSPayloadRFC8555Compliant(unittest.TestCase):
from acme.messages import NewAuthorization
new_order = NewAuthorization()
new_order.le_acme_version = 2
jobj = new_order.json_dumps(indent=2).encode()
# RFC8555 states that JWS bodies must not have a resource field.

View File

@@ -354,12 +354,9 @@ class ApacheConfigurator(common.Configurator):
self.version = self.get_version()
logger.debug('Apache version is %s',
'.'.join(str(i) for i in self.version))
if self.version < (2, 2):
if self.version < (2, 4):
raise errors.NotSupportedError(
"Apache Version {0} not supported.".format(str(self.version)))
elif self.version < (2, 4):
logger.warning('Support for Apache 2.2 is deprecated and will be removed in a '
'future release.')
# Recover from previous crash before Augeas initialization to have the
# correct parse tree from the get go.
@@ -1176,46 +1173,6 @@ class ApacheConfigurator(common.Configurator):
vhost.aliases.add(serveralias)
vhost.name = servername
def is_name_vhost(self, target_addr: obj.Addr) -> bool:
"""Returns if vhost is a name based vhost
NameVirtualHost was deprecated in Apache 2.4 as all VirtualHosts are
now NameVirtualHosts. If version is earlier than 2.4, check if addr
has a NameVirtualHost directive in the Apache config
:param certbot_apache._internal.obj.Addr target_addr: vhost address
:returns: Success
:rtype: bool
"""
# Mixed and matched wildcard NameVirtualHost with VirtualHost
# behavior is undefined. Make sure that an exact match exists
# search for NameVirtualHost directive for ip_addr
# note ip_addr can be FQDN although Apache does not recommend it
return (self.version >= (2, 4) or
bool(self.parser.find_dir("NameVirtualHost", str(target_addr))))
def add_name_vhost(self, addr: obj.Addr) -> None:
"""Adds NameVirtualHost directive for given address.
:param addr: Address that will be added as NameVirtualHost directive
:type addr: :class:`~certbot_apache._internal.obj.Addr`
"""
loc = parser.get_aug_path(self.parser.loc["name"])
if addr.get_port() == "443":
self.parser.add_dir_to_ifmodssl(
loc, "NameVirtualHost", [str(addr)])
else:
self.parser.add_dir(loc, "NameVirtualHost", [str(addr)])
msg = "Setting {0} to be NameBasedVirtualHost\n".format(addr)
logger.debug(msg)
self.save_notes += msg
def prepare_server_https(self, port: str, temp: bool = False) -> None:
"""Prepare the server for HTTPS.
@@ -1363,8 +1320,7 @@ class ApacheConfigurator(common.Configurator):
"""
if self.options.handle_modules:
if self.version >= (2, 4) and ("socache_shmcb_module" not in
self.parser.modules):
if "socache_shmcb_module" not in self.parser.modules:
self.enable_mod("socache_shmcb", temp=temp)
if "ssl_module" not in self.parser.modules:
self.enable_mod("ssl", temp=temp)
@@ -1451,10 +1407,6 @@ class ApacheConfigurator(common.Configurator):
# for the new directives; For these reasons... this is tacked
# on after fully creating the new vhost
# Now check if addresses need to be added as NameBasedVhost addrs
# This is for compliance with versions of Apache < 2.4
self._add_name_vhost_if_necessary(ssl_vhost)
return ssl_vhost
def _get_new_vh_path(self, orig_matches: List[str], new_matches: List[str]) -> Optional[str]:
@@ -1753,40 +1705,6 @@ class ApacheConfigurator(common.Configurator):
aliases = (self.parser.aug.get(match) for match in matches)
return self.domain_in_names(aliases, target_name)
def _add_name_vhost_if_necessary(self, vhost: obj.VirtualHost) -> None:
"""Add NameVirtualHost Directives if necessary for new vhost.
NameVirtualHosts was a directive in Apache < 2.4
https://httpd.apache.org/docs/2.2/mod/core.html#namevirtualhost
:param vhost: New virtual host that was recently created.
:type vhost: :class:`~certbot_apache._internal.obj.VirtualHost`
"""
need_to_save: bool = False
# See if the exact address appears in any other vhost
# Remember 1.1.1.1:* == 1.1.1.1 -> hence any()
for addr in vhost.addrs:
# In Apache 2.2, when a NameVirtualHost directive is not
# set, "*" and "_default_" will conflict when sharing a port
addrs = {addr,}
if addr.get_addr() in ("*", "_default_"):
addrs.update(obj.Addr((a, addr.get_port(),))
for a in ("*", "_default_"))
for test_vh in self.vhosts:
if (vhost.filep != test_vh.filep and
any(test_addr in addrs for
test_addr in test_vh.addrs) and not self.is_name_vhost(addr)):
self.add_name_vhost(addr)
logger.info("Enabling NameVirtualHosts on %s", addr)
need_to_save = True
break
if need_to_save:
self.save()
def find_vhost_by_id(self, id_str: str) -> obj.VirtualHost:
"""
Searches through VirtualHosts and tries to match the id in a comment
@@ -2002,12 +1920,6 @@ class ApacheConfigurator(common.Configurator):
:param unused_options: Not currently used
:type unused_options: Not Available
"""
min_apache_ver = (2, 3, 3)
if self.get_version() < min_apache_ver:
raise errors.PluginError(
"Unable to set OCSP directives.\n"
"Apache version is below 2.3.3.")
if "socache_shmcb_module" not in self.parser.modules:
self.enable_mod("socache_shmcb")
@@ -2188,10 +2100,7 @@ class ApacheConfigurator(common.Configurator):
general_vh.filep, ssl_vhost.filep)
def _set_https_redirection_rewrite_rule(self, vhost: obj.VirtualHost) -> None:
if self.get_version() >= (2, 3, 9):
self.parser.add_dir(vhost.path, "RewriteRule", constants.REWRITE_HTTPS_ARGS_WITH_END)
else:
self.parser.add_dir(vhost.path, "RewriteRule", constants.REWRITE_HTTPS_ARGS)
self.parser.add_dir(vhost.path, "RewriteRule", constants.REWRITE_HTTPS_ARGS)
def _verify_no_certbot_redirect(self, vhost: obj.VirtualHost) -> None:
"""Checks to see if a redirect was already installed by certbot.
@@ -2223,9 +2132,6 @@ class ApacheConfigurator(common.Configurator):
rewrite_args_dict[dir_path].append(match)
if rewrite_args_dict:
redirect_args = [constants.REWRITE_HTTPS_ARGS,
constants.REWRITE_HTTPS_ARGS_WITH_END]
for dir_path, args_paths in rewrite_args_dict.items():
arg_vals = [self.parser.aug.get(x) for x in args_paths]
@@ -2237,7 +2143,7 @@ class ApacheConfigurator(common.Configurator):
raise errors.PluginEnhancementAlreadyPresent(
"Certbot has already enabled redirection")
if arg_vals in redirect_args:
if arg_vals == constants.REWRITE_HTTPS_ARGS:
raise errors.PluginEnhancementAlreadyPresent(
"Certbot has already enabled redirection")
@@ -2306,12 +2212,6 @@ class ApacheConfigurator(common.Configurator):
if ssl_vhost.aliases:
serveralias = "ServerAlias " + " ".join(ssl_vhost.aliases)
rewrite_rule_args: List[str]
if self.get_version() >= (2, 3, 9):
rewrite_rule_args = constants.REWRITE_HTTPS_ARGS_WITH_END
else:
rewrite_rule_args = constants.REWRITE_HTTPS_ARGS
return (
f"<VirtualHost {' '.join(str(addr) for addr in self._get_proposed_addrs(ssl_vhost))}>\n"
f"{servername} \n"
@@ -2319,7 +2219,7 @@ class ApacheConfigurator(common.Configurator):
f"ServerSignature Off\n"
f"\n"
f"RewriteEngine On\n"
f"RewriteRule {' '.join(rewrite_rule_args)}\n"
f"RewriteRule {' '.join(constants.REWRITE_HTTPS_ARGS)}\n"
"\n"
f"ErrorLog {self.options.logs_root}/redirect.error.log\n"
f"LogLevel warn\n"

View File

@@ -42,18 +42,14 @@ AUGEAS_LENS_DIR = pkg_resources.resource_filename(
"""Path to the Augeas lens directory"""
REWRITE_HTTPS_ARGS: List[str] = [
"^", "https://%{SERVER_NAME}%{REQUEST_URI}", "[L,NE,R=permanent]"]
"""Apache version<2.3.9 rewrite rule arguments used for redirections to
https vhost"""
REWRITE_HTTPS_ARGS_WITH_END: List[str] = [
"^", "https://%{SERVER_NAME}%{REQUEST_URI}", "[END,NE,R=permanent]"]
"""Apache version >= 2.3.9 rewrite rule arguments used for redirections to
https vhost"""
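When the configurator writes a redirect (see the RewriteRule f-strings in the configurator diff above), it simply joins one of these argument lists with spaces. A tiny illustration using the [END,...] variant listed here:

REWRITE_HTTPS_ARGS_WITH_END = [
    "^", "https://%{SERVER_NAME}%{REQUEST_URI}", "[END,NE,R=permanent]"]
# Rendered the way the configurator emits it:
print("RewriteRule " + " ".join(REWRITE_HTTPS_ARGS_WITH_END))
# -> RewriteRule ^ https://%{SERVER_NAME}%{REQUEST_URI} [END,NE,R=permanent]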
OLD_REWRITE_HTTPS_ARGS: List[List[str]] = [
["^", "https://%{SERVER_NAME}%{REQUEST_URI}", "[L,QSA,R=permanent]"],
["^", "https://%{SERVER_NAME}%{REQUEST_URI}", "[END,QSA,R=permanent]"]]
["^", "https://%{SERVER_NAME}%{REQUEST_URI}", "[END,QSA,R=permanent]"],
["^", "https://%{SERVER_NAME}%{REQUEST_URI}", "[L,NE,R=permanent]"]]
HSTS_ARGS: List[str] = ["always", "set", "Strict-Transport-Security",
"\"max-age=31536000\""]

View File

@@ -24,22 +24,6 @@ logger = logging.getLogger(__name__)
class ApacheHttp01(common.ChallengePerformer):
"""Class that performs HTTP-01 challenges within the Apache configurator."""
CONFIG_TEMPLATE22_PRE = """\
RewriteEngine on
RewriteRule ^/\\.well-known/acme-challenge/([A-Za-z0-9-_=]+)$ {0}/$1 [L]
"""
CONFIG_TEMPLATE22_POST = """\
<Directory {0}>
Order Allow,Deny
Allow from all
</Directory>
<Location /.well-known/acme-challenge>
Order Allow,Deny
Allow from all
</Location>
"""
CONFIG_TEMPLATE24_PRE = """\
RewriteEngine on
RewriteRule ^/\\.well-known/acme-challenge/([A-Za-z0-9-_=]+)$ {0}/$1 [END]
@@ -90,11 +74,7 @@ class ApacheHttp01(common.ChallengePerformer):
"""Make sure that we have the needed modules available for http01"""
if self.configurator.conf("handle-modules"):
needed_modules = ["rewrite"]
if self.configurator.version < (2, 4):
needed_modules.append("authz_host")
else:
needed_modules.append("authz_core")
needed_modules = ["rewrite", "authz_core"]
for mod in needed_modules:
if mod + "_module" not in self.configurator.parser.modules:
self.configurator.enable_mod(mod, temp=True)
@@ -131,15 +111,8 @@ class ApacheHttp01(common.ChallengePerformer):
self.configurator.reverter.register_file_creation(
True, self.challenge_conf_post)
if self.configurator.version < (2, 4):
config_template_pre = self.CONFIG_TEMPLATE22_PRE
config_template_post = self.CONFIG_TEMPLATE22_POST
else:
config_template_pre = self.CONFIG_TEMPLATE24_PRE
config_template_post = self.CONFIG_TEMPLATE24_POST
config_text_pre = config_template_pre.format(self.challenge_dir)
config_text_post = config_template_post.format(self.challenge_dir)
config_text_pre = self.CONFIG_TEMPLATE24_PRE.format(self.challenge_dir)
config_text_post = self.CONFIG_TEMPLATE24_POST.format(self.challenge_dir)
logger.debug("writing a pre config file with text:\n %s", config_text_pre)
with open(self.challenge_conf_pre, "w") as new_conf:

View File

@@ -1,8 +1,6 @@
""" Distribution specific override class for CentOS family (RHEL, Fedora) """
import logging
from typing import Any
from typing import cast
from typing import List
from certbot_apache._internal import apache_util
from certbot_apache._internal import configurator
@@ -11,7 +9,6 @@ from certbot_apache._internal.configurator import OsOptions
from certbot import errors
from certbot import util
from certbot.errors import MisconfigurationError
logger = logging.getLogger(__name__)
@@ -79,82 +76,6 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
return CentOSParser(
self.options.server_root, self, self.options.vhost_root, self.version)
def _deploy_cert(self, *args: Any, **kwargs: Any) -> None: # pylint: disable=arguments-differ
"""
Override _deploy_cert in order to ensure that the Apache configuration
has "LoadModule ssl_module..." before parsing the VirtualHost configuration
that was created by Certbot
"""
super()._deploy_cert(*args, **kwargs)
if self.version < (2, 4, 0):
self._deploy_loadmodule_ssl_if_needed()
def _deploy_loadmodule_ssl_if_needed(self) -> None:
"""
Add "LoadModule ssl_module <pre-existing path>" to main httpd.conf if
it doesn't exist there already.
"""
loadmods = self.parser.find_dir("LoadModule", "ssl_module", exclude=False)
correct_ifmods: List[str] = []
loadmod_args: List[str] = []
loadmod_paths: List[str] = []
for m in loadmods:
noarg_path = m.rpartition("/")[0]
path_args = self.parser.get_all_args(noarg_path)
if loadmod_args:
if loadmod_args != path_args:
msg = ("Certbot encountered multiple LoadModule directives "
"for LoadModule ssl_module with differing library paths. "
"Please remove or comment out the one(s) that are not in "
"use, and run Certbot again.")
raise MisconfigurationError(msg)
else:
loadmod_args = [arg for arg in path_args if arg]
centos_parser: CentOSParser = cast(CentOSParser, self.parser)
if centos_parser.not_modssl_ifmodule(noarg_path):
if centos_parser.loc["default"] in noarg_path:
# LoadModule already in the main configuration file
if "ifmodule/" in noarg_path.lower() or "ifmodule[1]" in noarg_path.lower():
# It's the first or only IfModule in the file
return
# Populate the list of known !mod_ssl.c IfModules
nodir_path = noarg_path.rpartition("/directive")[0]
correct_ifmods.append(nodir_path)
else:
loadmod_paths.append(noarg_path)
if not loadmod_args:
# Do not try to enable mod_ssl
return
# Force creation as the directive wasn't found from the beginning of
# httpd.conf
rootconf_ifmod = self.parser.create_ifmod(
parser.get_aug_path(self.parser.loc["default"]),
"!mod_ssl.c", beginning=True)
# parser.get_ifmod returns a path postfixed with "/", remove that
self.parser.add_dir(rootconf_ifmod[:-1], "LoadModule", loadmod_args)
correct_ifmods.append(rootconf_ifmod[:-1])
self.save_notes += "Added LoadModule ssl_module to main configuration.\n"
# Wrap LoadModule mod_ssl inside of <IfModule !mod_ssl.c> if it's not
# configured like this already.
for loadmod_path in loadmod_paths:
nodir_path = loadmod_path.split("/directive")[0]
# Remove the old LoadModule directive
self.parser.aug.remove(loadmod_path)
# Create a new IfModule !mod_ssl.c if not already found on path
ssl_ifmod = self.parser.get_ifmod(nodir_path, "!mod_ssl.c", beginning=True)[:-1]
if ssl_ifmod not in correct_ifmods:
self.parser.add_dir(ssl_ifmod, "LoadModule", loadmod_args)
correct_ifmods.append(ssl_ifmod)
self.save_notes += ("Wrapped pre-existing LoadModule ssl_module "
"inside of <IfModule !mod_ssl> block.\n")
class CentOSParser(parser.ApacheParser):
"""CentOS specific ApacheParser override class"""
@@ -174,33 +95,3 @@ class CentOSParser(parser.ApacheParser):
defines = apache_util.parse_define_file(self.sysconfig_filep, "OPTIONS")
for k, v in defines.items():
self.variables[k] = v
def not_modssl_ifmodule(self, path: str) -> bool:
"""Checks if the provided Augeas path has argument !mod_ssl"""
if "ifmodule" not in path.lower():
return False
# Trim the path to the last ifmodule
workpath = path.lower()
while workpath:
# Get path to the last IfModule (ignore the tail)
parts = workpath.rpartition("ifmodule")
if not parts[0]:
# IfModule not found
break
ifmod_path = parts[0] + parts[1]
# Check if ifmodule had an index
if parts[2].startswith("["):
# Append the index from tail
ifmod_path += parts[2].partition("/")[0]
# Get the original path trimmed to correct length
# This is required to preserve cases
ifmod_real_path = path[0:len(ifmod_path)]
if "!mod_ssl.c" in self.get_all_args(ifmod_real_path):
return True
# Set the workpath to the heading part
workpath = parts[0]
return False

View File

@@ -47,6 +47,7 @@ class ApacheParser:
arg_var_interpreter: Pattern = re.compile(r"\$\{[^ \}]*}")
fnmatch_chars: Set[str] = {"*", "?", "\\", "[", "]"}
# pylint: disable=unused-argument
def __init__(self, root: str, configurator: "ApacheConfigurator",
vhostroot: str, version: Tuple[int, ...] = (2, 4)) -> None:
# Note: Order is important here.
@@ -74,9 +75,8 @@ class ApacheParser:
self.loc: Dict[str, str] = {"root": self._find_config_root()}
self.parse_file(self.loc["root"])
if version >= (2, 4):
# Look up variables from httpd and add to DOM if not already parsed
self.update_runtime_variables()
# Look up variables from httpd and add to DOM if not already parsed
self.update_runtime_variables()
# This problem has been fixed in Augeas 1.0
self.standardize_excl()
@@ -95,11 +95,6 @@ class ApacheParser:
self.parse_file(os.path.abspath(vhostroot) + "/" +
self.configurator.options.vhost_files)
# check to see if there were unparsed define statements
if version < (2, 4):
if self.find_dir("Define", exclude=False):
raise errors.PluginError("Error parsing runtime variables")
def check_parsing_errors(self, lens: str) -> None:
"""Verify Augeas can parse all of the lens files.
@@ -382,7 +377,7 @@ class ApacheParser:
for i, arg in enumerate(args):
self.aug.set("%s/arg[%d]" % (nvh_path, i + 1), arg)
def get_ifmod(self, aug_conf_path: str, mod: str, beginning: bool = False) -> str:
def get_ifmod(self, aug_conf_path: str, mod: str) -> str:
"""Returns the path to <IfMod mod> and creates one if it doesn't exist.
:param str aug_conf_path: Augeas configuration path
@@ -399,35 +394,26 @@ class ApacheParser:
if_mods = self.aug.match(("%s/IfModule/*[self::arg='%s']" %
(aug_conf_path, mod)))
if not if_mods:
return self.create_ifmod(aug_conf_path, mod, beginning)
return self.create_ifmod(aug_conf_path, mod)
# Strip off "arg" at end of first ifmod path
return if_mods[0].rpartition("arg")[0]
def create_ifmod(self, aug_conf_path: str, mod: str, beginning: bool = False) -> str:
def create_ifmod(self, aug_conf_path: str, mod: str) -> str:
"""Creates a new <IfMod mod> and returns its path.
:param str aug_conf_path: Augeas configuration path
:param str mod: module ie. mod_ssl.c
:param bool beginning: If the IfModule should be created to the beginning
of augeas path DOM tree.
:returns: Augeas path of the newly created IfModule directive.
The path may be dynamic, i.e. .../IfModule[last()]
:rtype: str
"""
if beginning:
c_path_arg = "{}/IfModule[1]/arg".format(aug_conf_path)
# Insert IfModule before the first directive
self.aug.insert("{}/directive[1]".format(aug_conf_path),
"IfModule", True)
retpath = "{}/IfModule[1]/".format(aug_conf_path)
else:
c_path = "{}/IfModule[last() + 1]".format(aug_conf_path)
c_path_arg = "{}/IfModule[last()]/arg".format(aug_conf_path)
self.aug.set(c_path, "")
retpath = "{}/IfModule[last()]/".format(aug_conf_path)
c_path = "{}/IfModule[last() + 1]".format(aug_conf_path)
c_path_arg = "{}/IfModule[last()]/arg".format(aug_conf_path)
self.aug.set(c_path, "")
retpath = "{}/IfModule[last()]/".format(aug_conf_path)
self.aug.set(c_path_arg, mod)
return retpath
@@ -587,20 +573,6 @@ class ApacheParser:
return ordered_matches
def get_all_args(self, match: str) -> List[Optional[str]]:
"""
Tries to fetch all arguments for a directive. See get_arg.
Note that if match is an ancestor node, it returns all names of
child directives as well as the list of arguments.
"""
if match[-1] != "/":
match = match + "/"
allargs = self.aug.match(match + '*')
return [self.get_arg(arg) for arg in allargs]
def get_arg(self, match: str) -> Optional[str]:
"""Uses augeas.get to get argument value and interprets result.

View File

@@ -1,7 +1,7 @@
from setuptools import find_packages
from setuptools import setup
version = '1.30.0.dev0'
version = '1.31.0.dev0'
install_requires = [
# We specify the minimum acme and certbot version as the current plugin

View File

@@ -1,228 +0,0 @@
"""Test for certbot_apache._internal.configurator for CentOS 6 overrides"""
import unittest
from unittest import mock
from certbot.compat import os
from certbot.errors import MisconfigurationError
from certbot_apache._internal import obj
from certbot_apache._internal import override_centos
from certbot_apache._internal import parser
import util
def get_vh_truth(temp_dir, config_name):
"""Return the ground truth for the specified directory."""
prefix = os.path.join(
temp_dir, config_name, "httpd/conf.d")
aug_pre = "/files" + prefix
vh_truth = [
obj.VirtualHost(
os.path.join(prefix, "test.example.com.conf"),
os.path.join(aug_pre, "test.example.com.conf/VirtualHost"),
{obj.Addr.fromstring("*:80")},
False, True, "test.example.com"),
obj.VirtualHost(
os.path.join(prefix, "ssl.conf"),
os.path.join(aug_pre, "ssl.conf/VirtualHost"),
{obj.Addr.fromstring("_default_:443")},
True, True, None)
]
return vh_truth
class CentOS6Tests(util.ApacheTest):
"""Tests for CentOS 6"""
def setUp(self): # pylint: disable=arguments-differ
test_dir = "centos6_apache/apache"
config_root = "centos6_apache/apache/httpd"
vhost_root = "centos6_apache/apache/httpd/conf.d"
super().setUp(test_dir=test_dir,
config_root=config_root,
vhost_root=vhost_root)
self.config = util.get_apache_configurator(
self.config_path, self.vhost_path, self.config_dir, self.work_dir,
version=(2, 2, 15), os_info="centos")
self.vh_truth = get_vh_truth(
self.temp_dir, "centos6_apache/apache")
def test_get_parser(self):
self.assertIsInstance(self.config.parser, override_centos.CentOSParser)
def test_get_virtual_hosts(self):
"""Make sure all vhosts are being properly found."""
vhs = self.config.get_virtual_hosts()
self.assertEqual(len(vhs), 2)
found = 0
for vhost in vhs:
for centos_truth in self.vh_truth:
if vhost == centos_truth:
found += 1
break
else:
raise Exception("Missed: %s" % vhost) # pragma: no cover
self.assertEqual(found, 2)
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
def test_loadmod_default(self, unused_mock_notify):
ssl_loadmods = self.config.parser.find_dir(
"LoadModule", "ssl_module", exclude=False)
self.assertEqual(len(ssl_loadmods), 1)
# Make sure the LoadModule ssl_module is in ssl.conf (default)
self.assertIn("ssl.conf", ssl_loadmods[0])
# ...and that it's not inside of <IfModule>
self.assertNotIn("IfModule", ssl_loadmods[0])
# Get the example vhost
self.config.assoc["test.example.com"] = self.vh_truth[0]
self.config.deploy_cert(
"random.demo", "example/cert.pem", "example/key.pem",
"example/cert_chain.pem", "example/fullchain.pem")
self.config.save()
post_loadmods = self.config.parser.find_dir(
"LoadModule", "ssl_module", exclude=False)
# We should now have LoadModule ssl_module in root conf and ssl.conf
self.assertEqual(len(post_loadmods), 2)
for lm in post_loadmods:
# lm[:-7] removes "/arg[#]" from the path
arguments = self.config.parser.get_all_args(lm[:-7])
self.assertEqual(arguments, ["ssl_module", "modules/mod_ssl.so"])
# ...and both of them should be wrapped in <IfModule !mod_ssl.c>
# lm[:-17] strips off /directive/arg[1] from the path.
ifmod_args = self.config.parser.get_all_args(lm[:-17])
self.assertIn("!mod_ssl.c", ifmod_args)
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
def test_loadmod_multiple(self, unused_mock_notify):
sslmod_args = ["ssl_module", "modules/mod_ssl.so"]
# Adds another LoadModule to main httpd.conf in addition to ssl.conf
self.config.parser.add_dir(self.config.parser.loc["default"], "LoadModule",
sslmod_args)
self.config.save()
pre_loadmods = self.config.parser.find_dir(
"LoadModule", "ssl_module", exclude=False)
# LoadModules are not within IfModule blocks
self.assertIs(any("ifmodule" in m.lower() for m in pre_loadmods), False)
self.config.assoc["test.example.com"] = self.vh_truth[0]
self.config.deploy_cert(
"random.demo", "example/cert.pem", "example/key.pem",
"example/cert_chain.pem", "example/fullchain.pem")
post_loadmods = self.config.parser.find_dir(
"LoadModule", "ssl_module", exclude=False)
for mod in post_loadmods:
with self.subTest(mod=mod):
# pylint: disable=no-member
self.assertIs(self.config.parser.not_modssl_ifmodule(mod), True)
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
def test_loadmod_rootconf_exists(self, unused_mock_notify):
sslmod_args = ["ssl_module", "modules/mod_ssl.so"]
rootconf_ifmod = self.config.parser.get_ifmod(
parser.get_aug_path(self.config.parser.loc["default"]),
"!mod_ssl.c", beginning=True)
self.config.parser.add_dir(rootconf_ifmod[:-1], "LoadModule", sslmod_args)
self.config.save()
# Get the example vhost
self.config.assoc["test.example.com"] = self.vh_truth[0]
self.config.deploy_cert(
"random.demo", "example/cert.pem", "example/key.pem",
"example/cert_chain.pem", "example/fullchain.pem")
self.config.save()
root_loadmods = self.config.parser.find_dir(
"LoadModule", "ssl_module",
start=parser.get_aug_path(self.config.parser.loc["default"]),
exclude=False)
mods = [lm for lm in root_loadmods if self.config.parser.loc["default"] in lm]
self.assertEqual(len(mods), 1)
# [:-7] removes "/arg[#]" from the path
self.assertEqual(
self.config.parser.get_all_args(mods[0][:-7]),
sslmod_args)
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
def test_neg_loadmod_already_on_path(self, unused_mock_notify):
loadmod_args = ["ssl_module", "modules/mod_ssl.so"]
ifmod = self.config.parser.get_ifmod(
self.vh_truth[1].path, "!mod_ssl.c", beginning=True)
self.config.parser.add_dir(ifmod[:-1], "LoadModule", loadmod_args)
self.config.parser.add_dir(self.vh_truth[1].path, "LoadModule", loadmod_args)
self.config.save()
pre_loadmods = self.config.parser.find_dir(
"LoadModule", "ssl_module", start=self.vh_truth[1].path, exclude=False)
self.assertEqual(len(pre_loadmods), 2)
# The ssl.conf now has two LoadModule directives, one inside of
# !mod_ssl.c IfModule
self.config.assoc["test.example.com"] = self.vh_truth[0]
self.config.deploy_cert(
"random.demo", "example/cert.pem", "example/key.pem",
"example/cert_chain.pem", "example/fullchain.pem")
self.config.save()
# Ensure that the additional LoadModule wasn't written into the IfModule
post_loadmods = self.config.parser.find_dir(
"LoadModule", "ssl_module", start=self.vh_truth[1].path, exclude=False)
self.assertEqual(len(post_loadmods), 1)
def test_loadmod_non_duplicate(self):
# the modules/mod_ssl.so exists in ssl.conf
sslmod_args = ["ssl_module", "modules/mod_somethingelse.so"]
rootconf_ifmod = self.config.parser.get_ifmod(
parser.get_aug_path(self.config.parser.loc["default"]),
"!mod_ssl.c", beginning=True)
self.config.parser.add_dir(rootconf_ifmod[:-1], "LoadModule", sslmod_args)
self.config.save()
self.config.assoc["test.example.com"] = self.vh_truth[0]
pre_matches = self.config.parser.find_dir("LoadModule",
"ssl_module", exclude=False)
self.assertRaises(MisconfigurationError, self.config.deploy_cert,
"random.demo", "example/cert.pem", "example/key.pem",
"example/cert_chain.pem", "example/fullchain.pem")
post_matches = self.config.parser.find_dir("LoadModule",
"ssl_module", exclude=False)
# Make sure that none was changed
self.assertEqual(pre_matches, post_matches)
@mock.patch("certbot_apache._internal.configurator.display_util.notify")
def test_loadmod_not_found(self, unused_mock_notify):
# Remove all existing LoadModule ssl_module... directives
orig_loadmods = self.config.parser.find_dir("LoadModule",
"ssl_module",
exclude=False)
for mod in orig_loadmods:
noarg_path = mod.rpartition("/")[0]
self.config.parser.aug.remove(noarg_path)
self.config.save()
self.config.deploy_cert(
"random.demo", "example/cert.pem", "example/key.pem",
"example/cert_chain.pem", "example/fullchain.pem")
post_loadmods = self.config.parser.find_dir("LoadModule",
"ssl_module",
exclude=False)
self.assertEqual(post_loadmods, [])
def test_no_ifmod_search_false(self):
#pylint: disable=no-member
self.assertIs(self.config.parser.not_modssl_ifmodule(
"/path/does/not/include/ifmod"
), False)
self.assertIs(self.config.parser.not_modssl_ifmodule(
""
), False)
self.assertIs(self.config.parser.not_modssl_ifmodule(
"/path/includes/IfModule/but/no/arguments"
), False)
if __name__ == "__main__":
unittest.main() # pragma: no cover

View File

@@ -443,18 +443,6 @@ class MultipleVhostsTest(util.ApacheTest):
"SSLCertificateChainFile", "two/cert_chain.pem",
self.vh_truth[1].path))
def test_is_name_vhost(self):
addr = obj.Addr.fromstring("*:80")
self.assertIs(self.config.is_name_vhost(addr), True)
self.config.version = (2, 2)
self.assertIs(self.config.is_name_vhost(addr), False)
def test_add_name_vhost(self):
self.config.add_name_vhost(obj.Addr.fromstring("*:443"))
self.config.add_name_vhost(obj.Addr.fromstring("*:80"))
self.assertTrue(self.config.parser.find_dir("NameVirtualHost", "*:443", exclude=False))
self.assertTrue(self.config.parser.find_dir("NameVirtualHost", "*:80"))
def test_add_listen_80(self):
mock_find = mock.Mock()
mock_add_dir = mock.Mock()
@@ -642,9 +630,6 @@ class MultipleVhostsTest(util.ApacheTest):
self.assertIs(ssl_vhost.ssl, True)
self.assertIs(ssl_vhost.enabled, False)
self.assertEqual(self.config.is_name_vhost(self.vh_truth[0]),
self.config.is_name_vhost(ssl_vhost))
self.assertEqual(len(self.config.vhosts), 13)
def test_clean_vhost_ssl(self):
@@ -721,21 +706,6 @@ class MultipleVhostsTest(util.ApacheTest):
# pylint: disable=protected-access
self.assertIs(self.config._get_ssl_vhost_path("example_path").endswith(".conf"), True)
def test_add_name_vhost_if_necessary(self):
# pylint: disable=protected-access
self.config.add_name_vhost = mock.Mock()
self.config.version = (2, 2)
self.config._add_name_vhost_if_necessary(self.vh_truth[0])
self.assertIs(self.config.add_name_vhost.called, True)
new_addrs = set()
for addr in self.vh_truth[0].addrs:
new_addrs.add(obj.Addr(("_default_", addr.get_port(),)))
self.vh_truth[0].addrs = new_addrs
self.config._add_name_vhost_if_necessary(self.vh_truth[0])
self.assertEqual(self.config.add_name_vhost.call_count, 2)
@mock.patch("certbot_apache._internal.configurator.http_01.ApacheHttp01.perform")
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.restart")
def test_perform(self, mock_restart, mock_http_perform):
@@ -946,20 +916,6 @@ class MultipleVhostsTest(util.ApacheTest):
self.assertEqual(len(stapling_cache_aug_path), 1)
@mock.patch("certbot.util.exe_exists")
def test_ocsp_unsupported_apache_version(self, mock_exe):
mock_exe.return_value = True
self.config.parser.update_runtime_variables = mock.Mock()
self.config.parser.modules["mod_ssl.c"] = None
self.config.parser.modules["socache_shmcb_module"] = None
self.config.get_version = mock.Mock(return_value=(2, 2, 0))
self.config.choose_vhost("certbot.demo")
self.assertRaises(errors.PluginError,
self.config.enhance, "certbot.demo", "staple-ocsp")
def test_get_http_vhost_third_filter(self):
ssl_vh = obj.VirtualHost(
"fp", "ap", {obj.Addr(("*", "443"))},
@@ -1137,7 +1093,7 @@ class MultipleVhostsTest(util.ApacheTest):
self.config.parser.modules["rewrite_module"] = None
self.config.parser.update_runtime_variables = mock.Mock()
mock_exe.return_value = True
self.config.get_version = mock.Mock(return_value=(2, 2, 0))
self.config.get_version = mock.Mock(return_value=(2, 4, 0))
ssl_vhost = self.config.choose_vhost("certbot.demo")
@@ -1567,9 +1523,6 @@ class MultiVhostsTest(util.ApacheTest):
self.assertIs(ssl_vhost.ssl, True)
self.assertIs(ssl_vhost.enabled, False)
self.assertEqual(self.config.is_name_vhost(self.vh_truth[1]),
self.config.is_name_vhost(ssl_vhost))
mock_path = "certbot_apache._internal.configurator.ApacheConfigurator._get_new_vh_path"
with mock.patch(mock_path) as mock_getpath:
mock_getpath.return_value = None

View File

@@ -53,15 +53,6 @@ class ApacheHttp01Test(util.ApacheTest):
def test_empty_perform(self):
self.assertEqual(len(self.http.perform()), 0)
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.enable_mod")
def test_enable_modules_apache_2_2(self, mock_enmod):
self.config.version = (2, 2)
del self.config.parser.modules["authz_host_module"]
del self.config.parser.modules["mod_authz_host.c"]
enmod_calls = self.common_enable_modules_test(mock_enmod)
self.assertEqual(enmod_calls[0][0][0], "authz_host")
@mock.patch("certbot_apache._internal.configurator.ApacheConfigurator.enable_mod")
def test_enable_modules_apache_2_4(self, mock_enmod):
del self.config.parser.modules["authz_core_module"]
@@ -143,21 +134,12 @@ class ApacheHttp01Test(util.ApacheTest):
self.config.config.http01_port = 12345
self.assertRaises(errors.PluginError, self.http.perform)
def test_perform_1_achall_apache_2_2(self):
self.combinations_perform_test(num_achalls=1, minor_version=2)
def test_perform_1_achall_apache_2_4(self):
self.combinations_perform_test(num_achalls=1, minor_version=4)
def test_perform_2_achall_apache_2_2(self):
self.combinations_perform_test(num_achalls=2, minor_version=2)
def test_perform_2_achall_apache_2_4(self):
self.combinations_perform_test(num_achalls=2, minor_version=4)
def test_perform_3_achall_apache_2_2(self):
self.combinations_perform_test(num_achalls=3, minor_version=2)
def test_perform_3_achall_apache_2_4(self):
self.combinations_perform_test(num_achalls=3, minor_version=4)
@@ -230,10 +212,7 @@ class ApacheHttp01Test(util.ApacheTest):
self.assertIn("RewriteRule", pre_conf_contents)
self.assertIn(self.http.challenge_dir, post_conf_contents)
if self.config.version < (2, 4):
self.assertIn("Allow from all", post_conf_contents)
else:
self.assertIn("Require all granted", post_conf_contents)
self.assertIn("Require all granted", post_conf_contents)
def _test_challenge_file(self, achall):
name = os.path.join(self.http.challenge_dir, achall.chall.encode("token"))

View File

@@ -370,15 +370,6 @@ class ParserInitTest(util.ApacheTest):
ApacheParser, os.path.relpath(self.config_path), self.config,
"/dummy/vhostpath", version=(2, 4, 22))
@mock.patch("certbot_apache._internal.apache_util._get_runtime_cfg")
def test_unparseable(self, mock_cfg):
from certbot_apache._internal.parser import ApacheParser
mock_cfg.return_value = ('Define: TEST')
self.assertRaises(
errors.PluginError,
ApacheParser, os.path.relpath(self.config_path), self.config,
"/dummy/vhostpath", version=(2, 2, 22))
def test_root_normalized(self):
from certbot_apache._internal.parser import ApacheParser

View File

@@ -1,9 +0,0 @@
This directory holds Apache 2.0 module-specific configuration files;
any files in this directory which have the ".conf" extension will be
processed as Apache configuration files.
Files are processed in alphabetical order, so if using configuration
directives which depend on, say, mod_perl being loaded, ensure that
these are placed in a filename later in the sort order than "perl.conf".

View File

@@ -1,222 +0,0 @@
#
# This is the Apache server configuration file providing SSL support.
# It contains the configuration directives to instruct the server how to
# serve pages over an https connection. For detailed information about these
# directives see <URL:http://httpd.apache.org/docs/2.2/mod/mod_ssl.html>
#
# Do NOT simply read the instructions in here without understanding
# what they do. They're here only as hints or reminders. If you are unsure
# consult the online docs. You have been warned.
#
LoadModule ssl_module modules/mod_ssl.so
#
# When we also provide SSL we have to listen to
# the HTTPS port in addition.
#
Listen 443
##
## SSL Global Context
##
## All SSL configuration in this context applies both to
## the main server and all SSL-enabled virtual hosts.
##
# Pass Phrase Dialog:
# Configure the pass phrase gathering process.
# The filtering dialog program (`builtin' is an internal
# terminal dialog) has to provide the pass phrase on stdout.
SSLPassPhraseDialog builtin
# Inter-Process Session Cache:
# Configure the SSL Session Cache: First the mechanism
# to use and second the expiring timeout (in seconds).
SSLSessionCache shmcb:/var/cache/mod_ssl/scache(512000)
SSLSessionCacheTimeout 300
# Semaphore:
# Configure the path to the mutual exclusion semaphore the
# SSL engine uses internally for inter-process synchronization.
SSLMutex default
# Pseudo Random Number Generator (PRNG):
# Configure one or more sources to seed the PRNG of the
# SSL library. The seed data should be of good random quality.
# WARNING! On some platforms /dev/random blocks if not enough entropy
# is available. This means you then cannot use the /dev/random device
# because it would lead to very long connection times (as long as
# it requires to make more entropy available). But usually those
# platforms additionally provide a /dev/urandom device which doesn't
# block. So, if available, use this one instead. Read the mod_ssl User
# Manual for more details.
SSLRandomSeed startup file:/dev/urandom 256
SSLRandomSeed connect builtin
#SSLRandomSeed startup file:/dev/random 512
#SSLRandomSeed connect file:/dev/random 512
#SSLRandomSeed connect file:/dev/urandom 512
#
# Use "SSLCryptoDevice" to enable any supported hardware
# accelerators. Use "openssl engine -v" to list supported
# engine names. NOTE: If you enable an accelerator and the
# server does not start, consult the error logs and ensure
# your accelerator is functioning properly.
#
SSLCryptoDevice builtin
#SSLCryptoDevice ubsec
##
## SSL Virtual Host Context
##
<VirtualHost _default_:443>
# General setup for the virtual host, inherited from global configuration
#DocumentRoot "/var/www/html"
#ServerName www.example.com:443
# Use separate log files for the SSL virtual host; note that LogLevel
# is not inherited from httpd.conf.
ErrorLog logs/ssl_error_log
TransferLog logs/ssl_access_log
LogLevel warn
# SSL Engine Switch:
# Enable/Disable SSL for this virtual host.
SSLEngine on
# SSL Protocol support:
# List the enable protocol levels with which clients will be able to
# connect. Disable SSLv2 access by default:
SSLProtocol all -SSLv2
# SSL Cipher Suite:
# List the ciphers that the client is permitted to negotiate.
# See the mod_ssl documentation for a complete list.
SSLCipherSuite DEFAULT:!EXP:!SSLv2:!DES:!IDEA:!SEED:+3DES
# Server Certificate:
# Point SSLCertificateFile at a PEM encoded certificate. If
# the certificate is encrypted, then you will be prompted for a
# pass phrase. Note that a kill -HUP will prompt again. A new
# certificate can be generated using the genkey(1) command.
SSLCertificateFile /etc/pki/tls/certs/localhost.crt
# Server Private Key:
# If the key is not combined with the certificate, use this
# directive to point at the key file. Keep in mind that if
# you've both a RSA and a DSA private key you can configure
# both in parallel (to also allow the use of DSA ciphers, etc.)
SSLCertificateKeyFile /etc/pki/tls/private/localhost.key
# Server Certificate Chain:
# Point SSLCertificateChainFile at a file containing the
# concatenation of PEM encoded CA certificates which form the
# certificate chain for the server certificate. Alternatively
# the referenced file can be the same as SSLCertificateFile
# when the CA certificates are directly appended to the server
# certificate for convinience.
#SSLCertificateChainFile /etc/pki/tls/certs/server-chain.crt
# Certificate Authority (CA):
# Set the CA certificate verification path where to find CA
# certificates for client authentication or alternatively one
# huge file containing all of them (file must be PEM encoded)
#SSLCACertificateFile /etc/pki/tls/certs/ca-bundle.crt
# Client Authentication (Type):
# Client certificate verification type and depth. Types are
# none, optional, require and optional_no_ca. Depth is a
# number which specifies how deeply to verify the certificate
# issuer chain before deciding the certificate is not valid.
#SSLVerifyClient require
#SSLVerifyDepth 10
# Access Control:
# With SSLRequire you can do per-directory access control based
# on arbitrary complex boolean expressions containing server
# variable checks and other lookup directives. The syntax is a
# mixture between C and Perl. See the mod_ssl documentation
# for more details.
#<Location />
#SSLRequire ( %{SSL_CIPHER} !~ m/^(EXP|NULL)/ \
# and %{SSL_CLIENT_S_DN_O} eq "Snake Oil, Ltd." \
# and %{SSL_CLIENT_S_DN_OU} in {"Staff", "CA", "Dev"} \
# and %{TIME_WDAY} >= 1 and %{TIME_WDAY} <= 5 \
# and %{TIME_HOUR} >= 8 and %{TIME_HOUR} <= 20 ) \
# or %{REMOTE_ADDR} =~ m/^192\.76\.162\.[0-9]+$/
#</Location>
# SSL Engine Options:
# Set various options for the SSL engine.
# o FakeBasicAuth:
# Translate the client X.509 into a Basic Authorisation. This means that
# the standard Auth/DBMAuth methods can be used for access control. The
# user name is the `one line' version of the client's X.509 certificate.
# Note that no password is obtained from the user. Every entry in the user
# file needs this password: `xxj31ZMTZzkVA'.
# o ExportCertData:
# This exports two additional environment variables: SSL_CLIENT_CERT and
# SSL_SERVER_CERT. These contain the PEM-encoded certificates of the
# server (always existing) and the client (only existing when client
# authentication is used). This can be used to import the certificates
# into CGI scripts.
# o StdEnvVars:
# This exports the standard SSL/TLS related `SSL_*' environment variables.
# Per default this exportation is switched off for performance reasons,
# because the extraction step is an expensive operation and is usually
# useless for serving static content. So one usually enables the
# exportation for CGI and SSI requests only.
# o StrictRequire:
# This denies access when "SSLRequireSSL" or "SSLRequire" applied even
# under a "Satisfy any" situation, i.e. when it applies access is denied
# and no other module can change it.
# o OptRenegotiate:
# This enables optimized SSL connection renegotiation handling when SSL
# directives are used in per-directory context.
#SSLOptions +FakeBasicAuth +ExportCertData +StrictRequire
<Files ~ "\.(cgi|shtml|phtml|php3?)$">
SSLOptions +StdEnvVars
</Files>
<Directory "/var/www/cgi-bin">
SSLOptions +StdEnvVars
</Directory>
# SSL Protocol Adjustments:
# The safe and default but still SSL/TLS standard compliant shutdown
# approach is that mod_ssl sends the close notify alert but doesn't wait for
# the close notify alert from client. When you need a different shutdown
# approach you can use one of the following variables:
# o ssl-unclean-shutdown:
# This forces an unclean shutdown when the connection is closed, i.e. no
# SSL close notify alert is send or allowed to received. This violates
# the SSL/TLS standard but is needed for some brain-dead browsers. Use
# this when you receive I/O errors because of the standard approach where
# mod_ssl sends the close notify alert.
# o ssl-accurate-shutdown:
# This forces an accurate shutdown when the connection is closed, i.e. a
# SSL close notify alert is send and mod_ssl waits for the close notify
# alert of the client. This is 100% SSL/TLS standard compliant, but in
# practice often causes hanging connections with brain-dead browsers. Use
# this only for browsers where you know that their SSL implementation
# works correctly.
# Notice: Most problems of broken clients are also related to the HTTP
# keep-alive facility, so you usually additionally want to disable
# keep-alive for those clients, too. Use variable "nokeepalive" for this.
# Similarly, one has to force some clients to use HTTP/1.0 to workaround
# their broken HTTP/1.1 implementation. Use variables "downgrade-1.0" and
# "force-response-1.0" for this.
SetEnvIf User-Agent ".*MSIE.*" \
nokeepalive ssl-unclean-shutdown \
downgrade-1.0 force-response-1.0
# Per-Server Logging:
# The home of a custom SSL log file. Use this when you want a
# compact non-error SSL logfile on a virtual host basis.
CustomLog logs/ssl_request_log \
"%t %h %{SSL_PROTOCOL}x %{SSL_CIPHER}x \"%r\" %b"
</VirtualHost>


@@ -1,7 +0,0 @@
<VirtualHost *:80>
ServerName test.example.com
ServerAdmin webmaster@dummy-host.example.com
DocumentRoot /var/www/htdocs
ErrorLog logs/dummy-host.example.com-error_log
CustomLog logs/dummy-host.example.com-access_log common
</VirtualHost>


@@ -1,11 +0,0 @@
#
# This configuration file enables the default "Welcome"
# page if there is no default index page present for
# the root URL. To disable the Welcome page, comment
# out all the lines below.
#
<LocationMatch "^/+$">
Options -Indexes
ErrorDocument 403 /error/noindex.html
</LocationMatch>

File diff suppressed because it is too large.


@@ -33,8 +33,8 @@ def assert_elliptic_key(key: str, curve: Type[EllipticCurve]) -> None:
key = load_pem_private_key(data=privkey1, password=None, backend=default_backend())
assert isinstance(key, EllipticCurvePrivateKey)
assert isinstance(key.curve, curve)
assert isinstance(key, EllipticCurvePrivateKey), f"should be an EC key but was {type(key)}"
assert isinstance(key.curve, curve), f"should have curve {curve} but was {key.curve}"
def assert_rsa_key(key: str, key_size: Optional[int] = None) -> None:
@@ -125,7 +125,7 @@ def assert_equals_world_read_permissions(file1: str, file2: str) -> None:
mode_file1 = os.stat(file1).st_mode & 0o004
mode_file2 = os.stat(file2).st_mode & 0o004
else:
everybody = win32security.ConvertStringSidToSid(EVERYBODY_SID)
everybody = win32security.ConvertStringSidToSid(EVERYBODY_SID) # pylint: disable=used-before-assignment
security1 = win32security.GetFileSecurity(file1, win32security.DACL_SECURITY_INFORMATION)
dacl1 = security1.GetSecurityDescriptorDacl()
@@ -135,7 +135,7 @@ def assert_equals_world_read_permissions(file1: str, file2: str) -> None:
'TrusteeType': win32security.TRUSTEE_IS_USER,
'Identifier': everybody,
})
mode_file1 = mode_file1 & ntsecuritycon.FILE_GENERIC_READ
mode_file1 = mode_file1 & ntsecuritycon.FILE_GENERIC_READ # pylint: disable=used-before-assignment
security2 = win32security.GetFileSecurity(file2, win32security.DACL_SECURITY_INFORMATION)
dacl2 = security2.GetSecurityDescriptorDacl()
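
The first hunk of the test-helper diff above adds failure messages to the elliptic-curve assertions. As a rough, self-contained illustration of the same check (not the project's helper itself), the sketch below generates a throwaway SECP256R1 key in place of a key read from a Certbot lineage; every name in it is illustrative.

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey
from cryptography.hazmat.primitives.serialization import load_pem_private_key

# Generate a throwaway key and serialize it to PEM, standing in for a lineage's private key.
pem = ec.generate_private_key(ec.SECP256R1(), default_backend()).private_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PrivateFormat.PKCS8,
    encryption_algorithm=serialization.NoEncryption(),
)

key = load_pem_private_key(data=pem, password=None, backend=default_backend())
assert isinstance(key, EllipticCurvePrivateKey), f"should be an EC key but was {type(key)}"
assert isinstance(key.curve, ec.SECP256R1), f"should have curve SECP256R1 but was {key.curve}"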


@@ -507,6 +507,19 @@ def test_new_key(context: IntegrationTestsContext) -> None:
assert_saved_lineage_option(context.config_dir, certname, 'reuse_key', 'True')
assert_elliptic_key(privkey4_path, SECP256R1)
# certonly: it should not be possible to change a key parameter without --new-key
with pytest.raises(subprocess.CalledProcessError) as error:
context.certbot(['certonly', '-d', certname, '--reuse-key',
'--elliptic-curve', 'secp384r1'])
assert 'Unable to change the --elliptic-curve' in error.value.stderr
# certonly: not specifying --key-type should keep the existing key type (non-interactively).
# TODO: when ECDSA is made default key type, the key types must be inverted
context.certbot(['certonly', '-d', certname, '--no-reuse-key'])
privkey5, privkey5_path = private_key(5)
assert_elliptic_key(privkey5_path, SECP256R1)
assert privkey4 != privkey5
def test_incorrect_key_type(context: IntegrationTestsContext) -> None:
with pytest.raises(subprocess.CalledProcessError):


@@ -133,18 +133,9 @@ class ACMEServer:
acme_xdist['directory_url'] = BOULDER_V2_DIRECTORY_URL
acme_xdist['challtestsrv_url'] = BOULDER_V2_CHALLTESTSRV_URL
acme_xdist['http_port'] = {
node: port for (node, port) in # pylint: disable=unnecessary-comprehension
zip(nodes, range(5200, 5200 + len(nodes)))
}
acme_xdist['https_port'] = {
node: port for (node, port) in # pylint: disable=unnecessary-comprehension
zip(nodes, range(5100, 5100 + len(nodes)))
}
acme_xdist['other_port'] = {
node: port for (node, port) in # pylint: disable=unnecessary-comprehension
zip(nodes, range(5300, 5300 + len(nodes)))
}
acme_xdist['http_port'] = dict(zip(nodes, range(5200, 5200 + len(nodes))))
acme_xdist['https_port'] = dict(zip(nodes, range(5100, 5100 + len(nodes))))
acme_xdist['other_port'] = dict(zip(nodes, range(5300, 5300 + len(nodes))))
self.acme_xdist = acme_xdist


@@ -33,7 +33,6 @@ from acme import messages
from certbot import achallenges
from certbot import errors as le_errors
from certbot._internal.display import obj as display_obj
from certbot.display import util as display_util
from certbot.tests import acme_util
DESCRIPTION = """
@@ -339,7 +338,7 @@ def setup_logging(args: argparse.Namespace) -> None:
def setup_display() -> None:
""""Prepares a display utility instance for the Certbot plugins """
displayer = display_util.NoninteractiveDisplay(sys.stdout)
displayer = display_obj.NoninteractiveDisplay(sys.stdout)
display_obj.set_display(displayer)


@@ -1,7 +1,7 @@
from setuptools import find_packages
from setuptools import setup
version = '1.30.0.dev0'
version = '1.31.0.dev0'
install_requires = [
'certbot',


@@ -4,7 +4,7 @@ import sys
from setuptools import find_packages
from setuptools import setup
version = '1.30.0.dev0'
version = '1.31.0.dev0'
install_requires = [
'cloudflare>=1.5.1',


@@ -1,190 +0,0 @@
Copyright 2015 Electronic Frontier Foundation and others
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS


@@ -1,7 +0,0 @@
include LICENSE.txt
include README.rst
recursive-include docs *
recursive-include tests *
include certbot_dns_cloudxns/py.typed
global-exclude __pycache__
global-exclude *.py[cod]


@@ -1 +0,0 @@
CloudXNS DNS Authenticator plugin for Certbot


@@ -1,90 +0,0 @@
"""
The `~certbot_dns_cloudxns.dns_cloudxns` plugin automates the process of
completing a ``dns-01`` challenge (`~acme.challenges.DNS01`) by creating, and
subsequently removing, TXT records using the CloudXNS API.
.. note::
The plugin is not installed by default. It can be installed by heading to
`certbot.eff.org <https://certbot.eff.org/instructions#wildcard>`_, choosing your system and
selecting the Wildcard tab.
Named Arguments
---------------
======================================== =====================================
``--dns-cloudxns-credentials`` CloudXNS credentials_ INI file.
(Required)
``--dns-cloudxns-propagation-seconds`` The number of seconds to wait for DNS
to propagate before asking the ACME
server to verify the DNS record.
(Default: 30)
======================================== =====================================
Credentials
-----------
Use of this plugin requires a configuration file containing CloudXNS API
credentials, obtained from your CloudXNS
`API page <https://www.cloudxns.net/en/AccountManage/apimanage.html>`_.
.. code-block:: ini
:name: credentials.ini
:caption: Example credentials file:
# CloudXNS API credentials used by Certbot
dns_cloudxns_api_key = 1234567890abcdef1234567890abcdef
dns_cloudxns_secret_key = 1122334455667788
The path to this file can be provided interactively or using the
``--dns-cloudxns-credentials`` command-line argument. Certbot records the path
to this file for use during renewal, but does not store the file's contents.
.. caution::
You should protect these API credentials as you would the password to your
CloudXNS account. Users who can read this file can use these credentials to
issue arbitrary API calls on your behalf. Users who can cause Certbot to run
using these credentials can complete a ``dns-01`` challenge to acquire new
certificates or revoke existing certificates for associated domains, even if
those domains aren't being managed by this server.
Certbot will emit a warning if it detects that the credentials file can be
accessed by other users on your system. The warning reads "Unsafe permissions
on credentials configuration file", followed by the path to the credentials
file. This warning will be emitted each time Certbot uses the credentials file,
including for renewal, and cannot be silenced except by addressing the issue
(e.g., by using a command like ``chmod 600`` to restrict access to the file).
Examples
--------
.. code-block:: bash
:caption: To acquire a certificate for ``example.com``
certbot certonly \\
--dns-cloudxns \\
--dns-cloudxns-credentials ~/.secrets/certbot/cloudxns.ini \\
-d example.com
.. code-block:: bash
:caption: To acquire a single certificate for both ``example.com`` and
``www.example.com``
certbot certonly \\
--dns-cloudxns \\
--dns-cloudxns-credentials ~/.secrets/certbot/cloudxns.ini \\
-d example.com \\
-d www.example.com
.. code-block:: bash
:caption: To acquire a certificate for ``example.com``, waiting 60 seconds
for DNS propagation
certbot certonly \\
--dns-cloudxns \\
--dns-cloudxns-credentials ~/.secrets/certbot/cloudxns.ini \\
--dns-cloudxns-propagation-seconds 60 \\
-d example.com
"""


@@ -1 +0,0 @@
"""Internal implementation of `~certbot_dns_cloudxns.dns_cloudxns` plugin."""


@@ -1,93 +0,0 @@
"""DNS Authenticator for CloudXNS DNS."""
import logging
from typing import Any
from typing import Callable
from typing import Optional
from lexicon.providers import cloudxns
from requests import HTTPError
from certbot import errors
from certbot.plugins import dns_common
from certbot.plugins import dns_common_lexicon
from certbot.plugins.dns_common import CredentialsConfiguration
logger = logging.getLogger(__name__)
ACCOUNT_URL = 'https://www.cloudxns.net/en/AccountManage/apimanage.html'
class Authenticator(dns_common.DNSAuthenticator):
"""DNS Authenticator for CloudXNS DNS
This Authenticator uses the CloudXNS DNS API to fulfill a dns-01 challenge.
"""
description = 'Obtain certificates using a DNS TXT record (if you are using CloudXNS for DNS).'
ttl = 60
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
self.credentials: Optional[CredentialsConfiguration] = None
@classmethod
def add_parser_arguments(cls, add: Callable[..., None],
default_propagation_seconds: int = 30) -> None:
super().add_parser_arguments(add, default_propagation_seconds)
add('credentials', help='CloudXNS credentials INI file.')
def more_info(self) -> str:
return 'This plugin configures a DNS TXT record to respond to a dns-01 challenge using ' + \
'the CloudXNS API.'
def _setup_credentials(self) -> None:
self.credentials = self._configure_credentials(
'credentials',
'CloudXNS credentials INI file',
{
'api-key': 'API key for CloudXNS account, obtained from {0}'.format(ACCOUNT_URL),
'secret-key': 'Secret key for CloudXNS account, obtained from {0}'
.format(ACCOUNT_URL)
}
)
def _perform(self, domain: str, validation_name: str, validation: str) -> None:
self._get_cloudxns_client().add_txt_record(domain, validation_name, validation)
def _cleanup(self, domain: str, validation_name: str, validation: str) -> None:
self._get_cloudxns_client().del_txt_record(domain, validation_name, validation)
def _get_cloudxns_client(self) -> "_CloudXNSLexiconClient":
if not self.credentials: # pragma: no cover
raise errors.Error("Plugin has not been prepared.")
return _CloudXNSLexiconClient(self.credentials.conf('api-key'),
self.credentials.conf('secret-key'),
self.ttl)
class _CloudXNSLexiconClient(dns_common_lexicon.LexiconClient):
"""
Encapsulates all communication with the CloudXNS via Lexicon.
"""
def __init__(self, api_key: str, secret_key: str, ttl: int) -> None:
super().__init__()
config = dns_common_lexicon.build_lexicon_config('cloudxns', {
'ttl': ttl,
}, {
'auth_username': api_key,
'auth_token': secret_key,
})
self.provider = cloudxns.Provider(config)
def _handle_http_error(self, e: HTTPError, domain_name: str) -> Optional[errors.PluginError]:
hint = None
if str(e).startswith('400 Client Error:'):
hint = 'Are your API key and Secret key values correct?'
hint_disp = f' ({hint})' if hint else ''
return errors.PluginError(f'Error determining zone identifier for {domain_name}: '
f'{e}.{hint_disp}')


@@ -1 +0,0 @@
/_build/


@@ -1,20 +0,0 @@
# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
SPHINXPROJ = certbot-dns-cloudxns
SOURCEDIR = .
BUILDDIR = _build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)


@@ -1,5 +0,0 @@
=================
API Documentation
=================
Certbot plugins implement the Certbot plugins API, and do not otherwise have an external API.


@@ -1,181 +0,0 @@
# -*- coding: utf-8 -*-
#
# certbot-dns-cloudxns documentation build configuration file, created by
# sphinx-quickstart on Wed May 10 16:05:50 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.viewcode']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['show-inheritance']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'certbot-dns-cloudxns'
copyright = u'2017, Certbot Project'
author = u'Certbot Project'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0'
# The full version, including alpha/beta/rc tags.
release = u'0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'en'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
default_role = 'py:obj'
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# https://docs.readthedocs.io/en/stable/faq.html#i-want-to-use-the-read-the-docs-theme-locally
# on_rtd is whether we are on readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# otherwise, readthedocs.org uses their theme by default, so no need to specify it
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'certbot-dns-cloudxnsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'certbot-dns-cloudxns.tex', u'certbot-dns-cloudxns Documentation',
u'Certbot Project', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'certbot-dns-cloudxns', u'certbot-dns-cloudxns Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'certbot-dns-cloudxns', u'certbot-dns-cloudxns Documentation',
author, 'certbot-dns-cloudxns', 'One line description of project.',
'Miscellaneous'),
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'python': ('https://docs.python.org/', None),
'acme': ('https://acme-python.readthedocs.org/en/latest/', None),
'certbot': ('https://eff-certbot.readthedocs.io/en/stable/', None),
}


@@ -1,28 +0,0 @@
.. certbot-dns-cloudxns documentation master file, created by
sphinx-quickstart on Wed May 10 16:05:50 2017.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
Welcome to certbot-dns-cloudxns's documentation!
================================================
.. toctree::
:maxdepth: 2
:caption: Contents:
.. automodule:: certbot_dns_cloudxns
:members:
.. toctree::
:maxdepth: 1
api
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`


@@ -1,36 +0,0 @@
@ECHO OFF
pushd %~dp0
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build
set SPHINXPROJ=certbot-dns-cloudxns
if "%1" == "" goto help
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.https://www.sphinx-doc.org/
exit /b 1
)
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
goto end
:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
:end
popd


@@ -1,15 +0,0 @@
# readthedocs.org gives no way to change the install command to "pip
# install -e certbot-dns-cloudxns[docs]" (that would in turn install documentation
# dependencies), but it allows to specify a requirements.txt file at
# https://readthedocs.org/dashboard/letsencrypt/advanced/ (c.f. #259)
# Although ReadTheDocs certainly doesn't need to install the project
# in --editable mode (-e), just "pip install certbot-dns-cloudxns[docs]" does not work as
# expected and "pip install -e certbot-dns-cloudxns[docs]" must be used instead
# We also pin our dependencies for increased stability.
-c ../tools/requirements.txt
-e acme
-e certbot
-e certbot-dns-cloudxns[docs]


@@ -1,73 +0,0 @@
import os
import sys
from setuptools import find_packages
from setuptools import setup
version = '1.30.0.dev0'
install_requires = [
'dns-lexicon>=3.2.1',
'setuptools>=41.6.0',
]
if not os.environ.get('SNAP_BUILD'):
install_requires.extend([
# We specify the minimum acme and certbot version as the current plugin
# version for simplicity. See
# https://github.com/certbot/certbot/issues/8761 for more info.
f'acme>={version}',
f'certbot>={version}',
])
elif 'bdist_wheel' in sys.argv[1:]:
raise RuntimeError('Unset SNAP_BUILD when building wheels '
'to include certbot dependencies.')
if os.environ.get('SNAP_BUILD'):
install_requires.append('packaging')
docs_extras = [
'Sphinx>=1.0', # autodoc_member_order = 'bysource', autodoc_default_flags
'sphinx_rtd_theme',
]
setup(
name='certbot-dns-cloudxns',
version=version,
description="CloudXNS DNS Authenticator plugin for Certbot",
url='https://github.com/certbot/certbot',
author="Certbot Project",
author_email='certbot-dev@eff.org',
license='Apache License 2.0',
python_requires='>=3.7',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Plugins',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Security',
'Topic :: System :: Installation/Setup',
'Topic :: System :: Networking',
'Topic :: System :: Systems Administration',
'Topic :: Utilities',
],
packages=find_packages(),
include_package_data=True,
install_requires=install_requires,
extras_require={
'docs': docs_extras,
},
entry_points={
'certbot.plugins': [
'dns-cloudxns = certbot_dns_cloudxns._internal.dns_cloudxns:Authenticator',
],
},
)


@@ -1,58 +0,0 @@
"""Tests for certbot_dns_cloudxns._internal.dns_cloudxns."""
import unittest
try:
import mock
except ImportError: # pragma: no cover
from unittest import mock # type: ignore
from requests.exceptions import HTTPError
from requests.exceptions import RequestException
from certbot.compat import os
from certbot.plugins import dns_test_common
from certbot.plugins import dns_test_common_lexicon
from certbot.tests import util as test_util
DOMAIN_NOT_FOUND = Exception('No domain found')
GENERIC_ERROR = RequestException
LOGIN_ERROR = HTTPError('400 Client Error: ...')
API_KEY = 'foo'
SECRET = 'bar'
class AuthenticatorTest(test_util.TempDirTestCase,
dns_test_common_lexicon.BaseLexiconAuthenticatorTest):
def setUp(self):
super().setUp()
from certbot_dns_cloudxns._internal.dns_cloudxns import Authenticator
path = os.path.join(self.tempdir, 'file.ini')
dns_test_common.write({"cloudxns_api_key": API_KEY, "cloudxns_secret_key": SECRET}, path)
self.config = mock.MagicMock(cloudxns_credentials=path,
cloudxns_propagation_seconds=0) # don't wait during tests
self.auth = Authenticator(self.config, "cloudxns")
self.mock_client = mock.MagicMock()
# _get_cloudxns_client | pylint: disable=protected-access
self.auth._get_cloudxns_client = mock.MagicMock(return_value=self.mock_client)
class CloudXNSLexiconClientTest(unittest.TestCase, dns_test_common_lexicon.BaseLexiconClientTest):
def setUp(self):
from certbot_dns_cloudxns._internal.dns_cloudxns import _CloudXNSLexiconClient
self.client = _CloudXNSLexiconClient(API_KEY, SECRET, 0)
self.provider_mock = mock.MagicMock()
self.client.provider = self.provider_mock
if __name__ == "__main__":
unittest.main() # pragma: no cover


@@ -4,7 +4,7 @@ import sys
from setuptools import find_packages
from setuptools import setup
version = '1.30.0.dev0'
version = '1.31.0.dev0'
install_requires = [
'python-digitalocean>=1.11', # 1.15.0 or newer is recommended for TTL support


@@ -4,7 +4,7 @@ import sys
from setuptools import find_packages
from setuptools import setup
version = '1.30.0.dev0'
version = '1.31.0.dev0'
install_requires = [
# This version of lexicon is required to address the problem described in


@@ -4,7 +4,7 @@ import sys
from setuptools import find_packages
from setuptools import setup
version = '1.30.0.dev0'
version = '1.31.0.dev0'
install_requires = [
'dns-lexicon>=3.2.1',


@@ -4,7 +4,7 @@ import sys
from setuptools import find_packages
from setuptools import setup
version = '1.30.0.dev0'
version = '1.31.0.dev0'
install_requires = [
'dns-lexicon>=3.2.1',


@@ -4,7 +4,7 @@ import sys
from setuptools import find_packages
from setuptools import setup
version = '1.30.0.dev0'
version = '1.31.0.dev0'
install_requires = [
'google-api-python-client>=1.5.5',


@@ -4,7 +4,7 @@ import sys
from setuptools import find_packages
from setuptools import setup
version = '1.30.0.dev0'
version = '1.31.0.dev0'
install_requires = [
'dns-lexicon>=3.2.1',


@@ -4,7 +4,7 @@ import sys
from setuptools import find_packages
from setuptools import setup
version = '1.30.0.dev0'
version = '1.31.0.dev0'
install_requires = [
'dns-lexicon>=3.2.1',


@@ -4,7 +4,7 @@ import sys
from setuptools import find_packages
from setuptools import setup
version = '1.30.0.dev0'
version = '1.31.0.dev0'
install_requires = [
'dns-lexicon>=3.2.1',


@@ -4,7 +4,7 @@ import sys
from setuptools import find_packages
from setuptools import setup
version = '1.30.0.dev0'
version = '1.31.0.dev0'
install_requires = [
'dns-lexicon>=3.2.1',


@@ -4,7 +4,7 @@ import sys
from setuptools import find_packages
from setuptools import setup
version = '1.30.0.dev0'
version = '1.31.0.dev0'
install_requires = [
'dnspython>=1.15.0',


@@ -38,7 +38,8 @@ class Authenticator(dns_common.DNSAuthenticator):
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
self.r53 = boto3.client("route53")
self._resource_records: DefaultDict[str, List[Dict[str, str]]] = collections.defaultdict(list)
self._resource_records: DefaultDict[str, List[Dict[str, str]]] = \
collections.defaultdict(list)
def more_info(self) -> str:
return "Solve a DNS01 challenge using AWS Route53"


@@ -4,7 +4,7 @@ import sys
from setuptools import find_packages
from setuptools import setup
version = '1.30.0.dev0'
version = '1.31.0.dev0'
install_requires = [
'boto3>=1.15.15',


@@ -4,7 +4,7 @@ import sys
from setuptools import find_packages
from setuptools import setup
version = '1.30.0.dev0'
version = '1.31.0.dev0'
install_requires = [
'dns-lexicon>=3.2.1',


@@ -1,7 +1,7 @@
from setuptools import find_packages
from setuptools import setup
version = '1.30.0.dev0'
version = '1.31.0.dev0'
install_requires = [
# We specify the minimum acme and certbot version as the current plugin


@@ -2,7 +2,7 @@
Certbot adheres to [Semantic Versioning](https://semver.org/).
## 1.30.0 - master
## 1.31.0 - master
### Added
@@ -18,6 +18,35 @@ Certbot adheres to [Semantic Versioning](https://semver.org/).
More details about these changes can be found on our GitHub repo.
## 1.30.0 - 2022-09-07
### Added
*
### Changed
* `acme.client.ClientBase`, `acme.messages.Authorization.resolved_combinations`,
`acme.messages.Authorization.combinations`, `acme.mixins`, `acme.fields.resource`,
and `acme.fields.Resource` are deprecated and will be removed in a future release.
* `acme.messages.OLD_ERROR_PREFIX` (`urn:acme:error:`) is deprecated and support for
the old ACME error prefix in Certbot will be removed in the next major release of
Certbot.
* `acme.messages.Directory.register` is deprecated and will be removed in the next
major release of Certbot. Furthermore, `.Directory` will only support lookups
by the exact resource name string in the ACME directory (e.g. `directory['newOrder']`).
* The `certbot-dns-cloudxns` plugin is now deprecated and will be removed in the
next major release of Certbot.
* The `source_address` argument for `acme.client.ClientNetwork` is deprecated
and support for it will be removed in the next major release.
* Add UI text suggesting users create certs for multiple domains, when possible
### Fixed
*
More details about these changes can be found on our GitHub repo.
## 1.29.0 - 2022-07-05
### Added
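
The changelog entry above notes that `.Directory` will only support lookups by the exact resource name string from the ACME directory. A minimal sketch of that lookup style, using a made-up directory document whose URLs are placeholders:

from acme import messages

# Field names follow RFC 8555; the URLs are invented for the example.
directory = messages.Directory.from_json({
    'newNonce': 'https://example.com/acme/new-nonce',
    'newAccount': 'https://example.com/acme/new-account',
    'newOrder': 'https://example.com/acme/new-order',
})

# Lookup by the exact resource name string, the only form that remains supported.
print(directory['newOrder'])  # https://example.com/acme/new-order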


@@ -1,3 +1,3 @@
"""Certbot client."""
# version number like 1.2.3a0, must have at least 2 parts, like 1.2
__version__ = '1.30.0.dev0'
__version__ = '1.31.0.dev0'


@@ -20,7 +20,7 @@ import pytz
from acme import fields as acme_fields
from acme import messages
from acme.client import ClientBase
from acme.client import ClientV2
from certbot import configuration
from certbot import errors
from certbot import interfaces
@@ -114,7 +114,7 @@ class AccountMemoryStorage(interfaces.AccountStorage):
def find_all(self) -> List[Account]:
return list(self.accounts.values())
def save(self, account: Account, client: ClientBase) -> None:
def save(self, account: Account, client: ClientV2) -> None:
if account.id in self.accounts:
logger.debug("Overwriting account: %s", account.id)
self.accounts[account.id] = account
@@ -243,11 +243,11 @@ class AccountFileStorage(interfaces.AccountStorage):
def load(self, account_id: str) -> Account:
return self._load_for_server_path(account_id, self.config.server_path)
def save(self, account: Account, client: ClientBase) -> None:
def save(self, account: Account, client: ClientV2) -> None:
"""Create a new account.
:param Account account: account to create
:param ClientBase client: ACME client associated to the account
:param ClientV2 client: ACME client associated to the account
"""
try:
@@ -258,11 +258,11 @@ class AccountFileStorage(interfaces.AccountStorage):
except IOError as error:
raise errors.AccountStorageError(error)
def update_regr(self, account: Account, client: ClientBase) -> None:
def update_regr(self, account: Account, client: ClientV2) -> None:
"""Update the registration resource.
:param Account account: account to update
:param ClientBase client: ACME client associated to the account
:param ClientV2 client: ACME client associated to the account
"""
try:
@@ -358,7 +358,7 @@ class AccountFileStorage(interfaces.AccountStorage):
with util.safe_open(self._key_path(dir_path), "w", chmod=0o400) as key_file:
key_file.write(account.key.json_dumps())
def _update_regr(self, account: Account, acme: ClientBase, dir_path: str) -> None:
def _update_regr(self, account: Account, acme: ClientV2, dir_path: str) -> None:
with open(self._regr_path(dir_path), "w") as regr_file:
regr = account.regr
# If we have a value for new-authz, save it for forwards


@@ -36,7 +36,7 @@ class AuthHandler:
:class:`~acme.challenges.Challenge` types
:type auth: certbot.interfaces.Authenticator
:ivar acme.client.BackwardsCompatibleClientV2 acme_client: ACME client API.
:ivar acme.client.ClientV2 acme_client: ACME client API.
:ivar account: Client's Account
:type account: :class:`certbot._internal.account.Account`
@@ -226,15 +226,10 @@ class AuthHandler:
logger.info("Performing the following challenges:")
for authzr in pending_authzrs:
authzr_challenges = authzr.body.challenges
if self.acme.acme_version == 1:
combinations = authzr.body.combinations
else:
combinations = tuple((i,) for i in range(len(authzr_challenges)))
path = gen_challenge_path(
authzr_challenges,
self._get_chall_pref(authzr.body.identifier.value),
combinations)
self._get_chall_pref(authzr.body.identifier.value))
achalls.extend(self._challenge_factory(authzr, path))
@@ -387,12 +382,9 @@ def challb_to_achall(challb: messages.ChallengeBody, account_key: josepy.JWK,
def gen_challenge_path(challbs: List[messages.ChallengeBody],
preferences: List[Type[challenges.Challenge]],
combinations: Tuple[Tuple[int, ...], ...]) -> Tuple[int, ...]:
preferences: List[Type[challenges.Challenge]]) -> Tuple[int, ...]:
"""Generate a plan to get authority over the identity.
.. todo:: This can be possibly be rewritten to use resolved_combinations.
:param tuple challbs: A tuple of challenges
(:class:`acme.messages.Challenge`) from
:class:`acme.messages.AuthorizationResource` to be
@@ -402,10 +394,6 @@ def gen_challenge_path(challbs: List[messages.ChallengeBody],
:param list preferences: List of challenge preferences for domain
(:class:`acme.challenges.Challenge` subclasses)
:param tuple combinations: A collection of sets of challenges from
:class:`acme.messages.Challenge`, each of which would
be sufficient to prove possession of the identifier.
:returns: list of indices from ``challenges``.
:rtype: list
@@ -413,21 +401,6 @@ def gen_challenge_path(challbs: List[messages.ChallengeBody],
path cannot be created that satisfies the CA given the preferences and
combinations.
"""
if combinations:
return _find_smart_path(challbs, preferences, combinations)
return _find_dumb_path(challbs, preferences)
def _find_smart_path(challbs: List[messages.ChallengeBody],
preferences: List[Type[challenges.Challenge]],
combinations: Tuple[Tuple[int, ...], ...]
) -> Tuple[int, ...]:
"""Find challenge path with server hints.
Can be called if combinations is included. Function uses a simple
ranking system to choose the combo with the lowest cost.
"""
chall_cost = {}
max_cost = 1
@@ -441,6 +414,8 @@ def _find_smart_path(challbs: List[messages.ChallengeBody],
# Set above completing all of the available challenges
best_combo_cost = max_cost
combinations = tuple((i,) for i in range(len(challbs)))
combo_total = 0
for combo in combinations:
for challenge_index in combo:
@@ -459,28 +434,6 @@ def _find_smart_path(challbs: List[messages.ChallengeBody],
return best_combo
def _find_dumb_path(challbs: List[messages.ChallengeBody],
preferences: List[Type[challenges.Challenge]]) -> Tuple[int, ...]:
"""Find challenge path without server hints.
Should be called if the combinations hint is not included by the
server. This function either returns a path containing all
challenges provided by the CA or raises an exception.
"""
path = []
for i, challb in enumerate(challbs):
# supported is set to True if the challenge type is supported
supported = next((True for pref_c in preferences
if isinstance(challb.chall, pref_c)), False)
if supported:
path.append(i)
else:
raise _report_no_chall_path(challbs)
return tuple(path)
def _report_no_chall_path(challbs: List[messages.ChallengeBody]) -> errors.AuthorizationError:
"""Logs and return a raisable error reporting that no satisfiable chall path exists.

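The hunks above drop the ACMEv1 "combinations" hint, so the remaining ranking logic effectively treats each offered challenge as its own singleton combination and picks the one whose type ranks highest in the preference list. The sketch below is a simplified, hypothetical re-implementation of that idea for illustration only; it is not the code that ships in this module.

from typing import Sequence, Tuple, Type

from acme import challenges


def pick_challenge_path(offered: Sequence[challenges.Challenge],
                        preferences: Sequence[Type[challenges.Challenge]]) -> Tuple[int, ...]:
    """Return the index of the most preferred supported challenge as a 1-tuple."""
    max_cost = len(preferences) + 1
    best: Tuple[int, ...] = ()
    best_cost = max_cost
    for index, chall in enumerate(offered):
        cost = next((rank + 1 for rank, pref in enumerate(preferences)
                     if isinstance(chall, pref)), max_cost)
        if cost < best_cost:
            best, best_cost = (index,), cost
    if not best:
        raise ValueError("no offered challenge type matches the preferences")
    return best


# Example: HTTP-01 is preferred over DNS-01, so the HTTP-01 index is chosen.
offered = [challenges.DNS01(token=b"x" * 32), challenges.HTTP01(token=b"x" * 32)]
print(pick_challenge_path(offered, [challenges.HTTP01, challenges.DNS01]))  # (1,)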

@@ -45,10 +45,6 @@ def _plugins_parsing(helpful: "helpful.HelpfulArgumentParser",
default=flag_default("dns_cloudflare"),
help=("Obtain certificates using a DNS TXT record (if you are "
"using Cloudflare for DNS)."))
helpful.add(["plugins", "certonly"], "--dns-cloudxns", action="store_true",
default=flag_default("dns_cloudxns"),
help=("Obtain certificates using a DNS TXT record (if you are "
"using CloudXNS for DNS)."))
helpful.add(["plugins", "certonly"], "--dns-digitalocean", action="store_true",
default=flag_default("dns_digitalocean"),
help=("Obtain certificates using a DNS TXT record (if you are "


@@ -10,7 +10,6 @@ from typing import IO
from typing import List
from typing import Optional
from typing import Tuple
import warnings
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric.rsa import generate_private_key
@@ -70,16 +69,8 @@ def acme_from_config_key(config: configuration.NamespaceConfig, key: jose.JWK,
verify_ssl=(not config.no_verify_ssl),
user_agent=determine_user_agent(config))
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
client = acme_client.BackwardsCompatibleClientV2(net, key, config.server)
if client.acme_version == 1:
logger.warning(
"Certbot is configured to use an ACMEv1 server (%s). ACMEv1 support is deprecated"
" and will soon be removed. See https://community.letsencrypt.org/t/143839 for "
"more information.", config.server)
return cast(acme_client.ClientV2, client)
directory = acme_client.ClientV2.get_directory(config.server, net)
return acme_client.ClientV2(directory, net)
def determine_user_agent(config: configuration.NamespaceConfig) -> str:
@@ -256,18 +247,13 @@ def perform_registration(acme: acme_client.ClientV2, config: configuration.Names
" Please use --eab-kid and --eab-hmac-key.")
raise errors.Error(msg)
tos = acme.directory.meta.terms_of_service
if tos_cb and tos:
tos_cb(tos)
try:
newreg = messages.NewRegistration.from_data(
email=config.email, external_account_binding=eab)
# Until ACME v1 support is removed from Certbot, we actually need the provided
# ACME client to be a wrapper of type BackwardsCompatibleClientV2.
# TODO: Remove this cast and rewrite the logic when the client is actually a ClientV2
try:
return cast(acme_client.BackwardsCompatibleClientV2,
acme).new_account_and_tos(newreg, tos_cb)
except AttributeError:
raise errors.Error("The ACME client must be an instance of "
"acme.client.BackwardsCompatibleClientV2")
return acme.new_account(messages.NewRegistration.from_data(
email=config.email, terms_of_service_agreed=True, external_account_binding=eab))
except messages.Error as e:
if e.code in ("invalidEmail", "invalidContact"):
if config.noninteractive_mode:
@@ -291,8 +277,8 @@ class Client:
:ivar .Authenticator auth: Prepared (`.Authenticator.prepare`)
authenticator that can solve ACME challenges.
:ivar .Installer installer: Installer.
:ivar acme.client.BackwardsCompatibleClientV2 acme: Optional ACME
client API handle. You might already have one from `register`.
:ivar acme.client.ClientV2 acme: Optional ACME client API handle. You might
already have one from `register`.
"""

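This hunk swaps the BackwardsCompatibleClientV2 wrapper for a ClientV2 built from get_directory and registers through new_account. A rough standalone sketch of that construction pattern follows, assuming the acme package from this release is installed; the staging directory URL and the e-mail address are placeholders, and running it would actually create a (staging) account.

import josepy as jose
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa

from acme import client as acme_client
from acme import messages

# Account key; the size here is just a reasonable placeholder.
key = jose.JWKRSA(key=rsa.generate_private_key(
    public_exponent=65537, key_size=2048, backend=default_backend()))

net = acme_client.ClientNetwork(key, user_agent='clientv2-sketch')
directory = acme_client.ClientV2.get_directory(
    'https://acme-staging-v02.api.letsencrypt.org/directory', net)
client = acme_client.ClientV2(directory, net)

# Mirrors the new_account call shown in the diff above.
regr = client.new_account(messages.NewRegistration.from_data(
    email='admin@example.com', terms_of_service_agreed=True))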

@@ -112,7 +112,6 @@ CLI_DEFAULTS: Dict[str, Any] = dict( # noqa
manual=False,
webroot=False,
dns_cloudflare=False,
dns_cloudxns=False,
dns_digitalocean=False,
dns_dnsimple=False,
dns_dnsmadeeasy=False,


@@ -10,11 +10,7 @@ from typing import Tuple
from typing import TypeVar
from typing import Union
import zope.component
import zope.interface
from certbot import errors
from certbot import interfaces
from certbot._internal import constants
from certbot._internal.display import completer
from certbot._internal.display import util
@@ -34,6 +30,7 @@ SIDE_FRAME = ("- " * 39) + "-"
"""Display boundary (alternates spaces, so when copy-pasted, markdown doesn't interpret
it as a heading)"""
# This class holds the global state of the display service. Using this class
# eliminates potential gotchas that exist if self.display was just a global
# variable. In particular, in functions `_DISPLAY = <value>` would create a
@@ -50,9 +47,6 @@ _SERVICE = _DisplayService()
T = TypeVar("T")
# This use of IDisplay can be removed when this class is no longer accessible
# through the public API in certbot.display.util.
@zope.interface.implementer(interfaces.IDisplay)
class FileDisplay:
"""File-based display."""
# see https://github.com/certbot/certbot/issues/3915
@@ -410,9 +404,6 @@ class FileDisplay:
return OK, selection
# This use of IDisplay can be removed when this class is no longer accessible
# through the public API in certbot.display.util.
@zope.interface.implementer(interfaces.IDisplay)
class NoninteractiveDisplay:
"""A display utility implementation that never asks for interactive user input"""
@@ -573,8 +564,4 @@ def set_display(display: Union[FileDisplay, NoninteractiveDisplay]) -> None:
:param Union[FileDisplay, NoninteractiveDisplay] display: the display service
"""
# This call is done only for retro-compatibility purposes.
# TODO: Remove this call once zope dependencies are removed from Certbot.
zope.component.provideUtility(display, interfaces.IDisplay)
_SERVICE.display = display


@@ -194,7 +194,7 @@ class _WindowsLockMechanism(_BaseLockMechanism):
low level APIs, and Python does not do it. As of Python 3.7 and below, Python developers
state that deleting a file opened by a process from another process is not possible with
os.open and io.open.
Consequently, mscvrt.locking is sufficient to obtain an effective lock, and the race
Consequently, msvcrt.locking is sufficient to obtain an effective lock, and the race
condition encountered on Linux is not possible on Windows, leading to a simpler workflow.
"""
def acquire(self) -> None:
@@ -209,7 +209,7 @@ class _WindowsLockMechanism(_BaseLockMechanism):
# This "type: ignore" is currently needed because msvcrt methods
# are only defined on Windows. See
# https://github.com/python/typeshed/blob/16ae4c61201cd8b96b8b22cdfb2ab9e89ba5bcf2/stdlib/msvcrt.pyi.
msvcrt.locking(fd, msvcrt.LK_NBLCK, 1) # type: ignore
msvcrt.locking(fd, msvcrt.LK_NBLCK, 1) # type: ignore # pylint: disable=used-before-assignment
except (IOError, OSError) as err:
if fd:
os.close(fd)
@@ -229,7 +229,7 @@ class _WindowsLockMechanism(_BaseLockMechanism):
# This "type: ignore" is currently needed because msvcrt methods
# are only defined on Windows. See
# https://github.com/python/typeshed/blob/16ae4c61201cd8b96b8b22cdfb2ab9e89ba5bcf2/stdlib/msvcrt.pyi.
msvcrt.locking(self._fd, msvcrt.LK_UNLCK, 1) # type: ignore
msvcrt.locking(self._fd, msvcrt.LK_UNLCK, 1) # type: ignore # pylint: disable=used-before-assignment
os.close(self._fd)
try:

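The docstring above argues that msvcrt.locking alone gives an effective lock on Windows. As a rough, Windows-only sketch of the non-blocking acquire/release cycle described here (error handling and the retry logic of Certbot's real lock mechanism are omitted, and the helper names are illustrative):

import msvcrt  # available on Windows only
import os

def try_lock(path: str) -> int:
    """Open the lock file and take a non-blocking one-byte lock; return the fd."""
    fd = os.open(path, os.O_CREAT | os.O_RDWR)
    try:
        msvcrt.locking(fd, msvcrt.LK_NBLCK, 1)  # raises OSError if another process holds the lock
    except OSError:
        os.close(fd)
        raise
    return fd

def unlock(fd: int) -> None:
    msvcrt.locking(fd, msvcrt.LK_UNLCK, 1)
    os.close(fd)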
View File

@@ -17,8 +17,6 @@ from typing import Union
import configobj
import josepy as jose
import zope.component
import zope.interface
from acme import client as acme_client
from acme import errors as acme_errors
@@ -38,7 +36,6 @@ from certbot._internal import eff
from certbot._internal import hooks
from certbot._internal import log
from certbot._internal import renewal
from certbot._internal import reporter
from certbot._internal import snap_config
from certbot._internal import storage
from certbot._internal import updater
@@ -1165,15 +1162,14 @@ def plugins_cmd(config: configuration.NamespaceConfig,
return
filtered.init(config)
verified = filtered.verify(ifaces)
logger.debug("Verified plugins: %r", verified)
logger.debug("Filtered plugins: %r", filtered)
if not config.prepare:
notify(str(verified))
notify(str(filtered))
return
verified.prepare()
available = verified.available()
filtered.prepare()
available = filtered.available()
logger.debug("Prepared plugins: %s", available)
notify(str(available))
@@ -1654,8 +1650,8 @@ def make_or_verify_needed_dirs(config: configuration.NamespaceConfig) -> None:
@contextmanager
def make_displayer(config: configuration.NamespaceConfig
) -> Generator[Union[display_util.NoninteractiveDisplay,
display_util.FileDisplay], None, None]:
) -> Generator[Union[display_obj.NoninteractiveDisplay,
display_obj.FileDisplay], None, None]:
"""Creates a display object appropriate to the flags in the supplied config.
:param config: Configuration object
@@ -1663,18 +1659,18 @@ def make_displayer(config: configuration.NamespaceConfig
:returns: Display object
"""
displayer: Union[None, display_util.NoninteractiveDisplay,
display_util.FileDisplay] = None
displayer: Union[None, display_obj.NoninteractiveDisplay,
display_obj.FileDisplay] = None
devnull: Optional[IO] = None
if config.quiet:
config.noninteractive_mode = True
devnull = open(os.devnull, "w") # pylint: disable=consider-using-with
displayer = display_util.NoninteractiveDisplay(devnull)
displayer = display_obj.NoninteractiveDisplay(devnull)
elif config.noninteractive_mode:
displayer = display_util.NoninteractiveDisplay(sys.stdout)
displayer = display_obj.NoninteractiveDisplay(sys.stdout)
else:
displayer = display_util.FileDisplay(
displayer = display_obj.FileDisplay(
sys.stdout, config.force_interactive)
try:
@@ -1716,10 +1712,6 @@ def main(cli_args: List[str] = None) -> Optional[Union[str, int]]:
args = cli.prepare_and_parse_args(plugins, cli_args)
config = configuration.NamespaceConfig(args)
# This call is done only for retro-compatibility purposes.
# TODO: Remove this call once zope dependencies are removed from Certbot.
zope.component.provideUtility(config, interfaces.IConfig)
# On windows, shell without administrative right cannot create symlinks required by certbot.
# So we check the rights before continuing.
misc.raise_for_non_administrative_windows_rights()
@@ -1732,12 +1724,6 @@ def main(cli_args: List[str] = None) -> Optional[Union[str, int]]:
if config.func != plugins_cmd: # pylint: disable=comparison-with-callable
raise
# These calls are done only for retro-compatibility purposes.
# TODO: Remove these calls once zope dependencies are removed from Certbot.
report = reporter.Reporter(config)
zope.component.provideUtility(report, interfaces.IReporter)
util.atexit_register(report.print_messages)
with make_displayer(config) as displayer:
display_obj.set_display(displayer)

View File

@@ -12,11 +12,8 @@ from typing import Mapping
from typing import Optional
from typing import Type
from typing import Union
import warnings
import pkg_resources
import zope.interface
import zope.interface.verify
from certbot import configuration
from certbot import errors
@@ -31,7 +28,6 @@ PREFIX_FREE_DISTRIBUTIONS = [
"certbot",
"certbot-apache",
"certbot-dns-cloudflare",
"certbot-dns-cloudxns",
"certbot-dns-digitalocean",
"certbot-dns-dnsimple",
"certbot-dns-dnsmadeeasy",
@@ -116,7 +112,7 @@ class PluginEntryPoint:
def ifaces(self, *ifaces_groups: Iterable[Type]) -> bool:
"""Does plugin implements specified interface groups?"""
return not ifaces_groups or any(
all(_implements(self.plugin_cls, iface)
all(issubclass(self.plugin_cls, iface)
for iface in ifaces)
for ifaces in ifaces_groups)
@@ -134,16 +130,6 @@ class PluginEntryPoint:
self._initialized = self.plugin_cls(config, self.name)
return self._initialized
def verify(self, ifaces: Iterable[Type]) -> bool:
"""Verify that the plugin conforms to the specified interfaces."""
if not self.initialized:
raise ValueError("Plugin is not initialized.")
for iface in ifaces: # zope.interface.providedBy(plugin)
if not _verify(self.init(), self.plugin_cls, iface):
return False
return True
@property
def prepared(self) -> bool:
"""Has the plugin been prepared already?"""
@@ -265,7 +251,7 @@ class PluginsRegistry(Mapping):
plugin2 = other_ep.entry_point.dist.key if other_ep.entry_point.dist else "unknown"
raise Exception("Duplicate plugin name {0} from {1} and {2}.".format(
plugin_ep.name, plugin1, plugin2))
if _provides(plugin_ep.plugin_cls, interfaces.Plugin):
if issubclass(plugin_ep.plugin_cls, interfaces.Plugin):
plugins[plugin_ep.name] = plugin_ep
else: # pragma: no cover
logger.warning(
@@ -300,10 +286,6 @@ class PluginsRegistry(Mapping):
"""Filter plugins based on interfaces."""
return self.filter(lambda p_ep: p_ep.ifaces(*ifaces_groups))
def verify(self, ifaces: Iterable[Type]) -> "PluginsRegistry":
"""Filter plugins based on verification."""
return self.filter(lambda p_ep: p_ep.verify(ifaces))
def prepare(self) -> List[Union[bool, Error]]:
"""Prepare all plugins in the registry."""
return [plugin_ep.prepare() for plugin_ep in self._plugins.values()]
@@ -342,88 +324,3 @@ class PluginsRegistry(Mapping):
if not self._plugins:
return "No plugins"
return "\n\n".join(str(p_ep) for p_ep in self._plugins.values())
_DEPRECATION_PLUGIN = ("Zope interface certbot.interfaces.IPlugin is deprecated, "
"use ABC certbot.interface.Plugin instead.")
_DEPRECATION_AUTHENTICATOR = ("Zope interface certbot.interfaces.IAuthenticator is deprecated, "
"use ABC certbot.interface.Authenticator instead.")
_DEPRECATION_INSTALLER = ("Zope interface certbot.interfaces.IInstaller is deprecated, "
"use ABC certbot.interface.Installer instead.")
_DEPRECATION_FACTORY = ("Zope interface certbot.interfaces.IPluginFactory is deprecated, "
"use ABC certbot.interface.Plugin instead.")
def _provides(target_class: Type[interfaces.Plugin], iface: Type) -> bool:
if issubclass(target_class, iface):
return True
if iface == interfaces.Plugin and interfaces.IPluginFactory.providedBy(target_class):
logging.warning(_DEPRECATION_FACTORY)
warnings.warn(_DEPRECATION_FACTORY, DeprecationWarning)
return True
return False
def _implements(target_class: Type[interfaces.Plugin], iface: Type) -> bool:
if issubclass(target_class, iface):
return True
if iface == interfaces.Plugin and interfaces.IPlugin.implementedBy(target_class):
logging.warning(_DEPRECATION_PLUGIN)
warnings.warn(_DEPRECATION_PLUGIN, DeprecationWarning)
return True
if iface == interfaces.Authenticator and interfaces.IAuthenticator.implementedBy(target_class):
logging.warning(_DEPRECATION_AUTHENTICATOR)
warnings.warn(_DEPRECATION_AUTHENTICATOR, DeprecationWarning)
return True
if iface == interfaces.Installer and interfaces.IInstaller.implementedBy(target_class):
logging.warning(_DEPRECATION_INSTALLER)
warnings.warn(_DEPRECATION_INSTALLER, DeprecationWarning)
return True
return False
def _verify(target_instance: interfaces.Plugin, target_class: Type[interfaces.Plugin],
iface: Type) -> bool:
if issubclass(target_class, iface):
# No need to trigger some verify logic for ABCs: when the object is instantiated,
# an error would be raised if implementation is not done properly.
# So the checks have been done effectively when the plugin has been initialized.
return True
zope_iface: Optional[Type[zope.interface.Interface]] = None
message = ""
if iface == interfaces.Plugin:
zope_iface = interfaces.IPlugin
message = _DEPRECATION_PLUGIN
if iface == interfaces.Authenticator:
zope_iface = interfaces.IAuthenticator
message = _DEPRECATION_AUTHENTICATOR
if iface == interfaces.Installer:
zope_iface = interfaces.IInstaller
message = _DEPRECATION_INSTALLER
if not zope_iface:
raise ValueError(f"Unexpected type: {iface.__name__}")
try:
zope.interface.verify.verifyObject(zope_iface, target_instance)
logging.warning(message)
warnings.warn(message, DeprecationWarning)
return True
except zope.interface.exceptions.BrokenImplementation as error:
if zope_iface.implementedBy(target_class):
logger.debug(
"%s implements %s but object does not verify: %s",
target_class, zope_iface.__name__, error, exc_info=True)
return False
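This hunk replaces zope-interface verification with plain issubclass checks against Certbot's ABC-based interfaces. The filtering idea can be sketched with stand-in classes (simplified placeholders, not the real plugin registry):

from abc import ABC, abstractmethod

class Plugin(ABC):
    """Stand-in for certbot.interfaces.Plugin."""

class Authenticator(Plugin):
    """Stand-in for certbot.interfaces.Authenticator."""
    @abstractmethod
    def perform(self): ...

class Installer(Plugin):
    """Stand-in for certbot.interfaces.Installer."""

class MyAuth(Authenticator):
    def perform(self):
        return "done"

def implements_all(plugin_cls, ifaces):
    # Mirrors PluginEntryPoint.ifaces(): every interface in a group must match.
    return all(issubclass(plugin_cls, iface) for iface in ifaces)

print(implements_all(MyAuth, (Plugin, Authenticator)))  # True
print(implements_all(MyAuth, (Installer,)))             # False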

View File

@@ -65,7 +65,6 @@ def get_unprepared_installer(config: configuration.NamespaceConfig,
return None
installers = plugins.filter(lambda p_ep: p_ep.check_name(req_inst))
installers.init(config)
installers = installers.verify((interfaces.Installer,))
if len(installers) > 1:
raise errors.PluginSelectionError(
"Found multiple installers with the name %s, Certbot is unable to "
@@ -116,9 +115,8 @@ def pick_plugin(config: configuration.NamespaceConfig, default: Optional[str],
filtered = plugins.visible().ifaces(ifaces)
filtered.init(config)
verified = filtered.verify(ifaces)
verified.prepare()
prepared = verified.available()
filtered.prepare()
prepared = filtered.available()
if len(prepared) > 1:
logger.debug("Multiple candidate plugins: %s", prepared)
@@ -168,7 +166,7 @@ def choose_plugin(prepared: List[disco.PluginEntryPoint],
return None
noninstaller_plugins = ["webroot", "manual", "standalone", "dns-cloudflare", "dns-cloudxns",
noninstaller_plugins = ["webroot", "manual", "standalone", "dns-cloudflare",
"dns-digitalocean", "dns-dnsimple", "dns-dnsmadeeasy", "dns-gehirn",
"dns-google", "dns-linode", "dns-luadns", "dns-nsone", "dns-ovh",
"dns-rfc2136", "dns-route53", "dns-sakuracloud"]
@@ -316,8 +314,6 @@ def cli_plugin_requests(config: configuration.NamespaceConfig
req_auth = set_configurator(req_auth, "manual")
if config.dns_cloudflare:
req_auth = set_configurator(req_auth, "dns-cloudflare")
if config.dns_cloudxns:
req_auth = set_configurator(req_auth, "dns-cloudxns")
if config.dns_digitalocean:
req_auth = set_configurator(req_auth, "dns-digitalocean")
if config.dns_dnsimple:

View File

@@ -19,12 +19,10 @@ from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives.serialization import load_pem_private_key
import zope.component
from certbot import configuration
from certbot import crypto_util
from certbot import errors
from certbot import interfaces
from certbot import util
from certbot._internal import cli
from certbot._internal import client
@@ -326,12 +324,57 @@ def _avoid_invalidating_lineage(config: configuration.NamespaceConfig,
"unless you use the --break-my-certs flag!")
def _avoid_reuse_key_conflicts(config: configuration.NamespaceConfig,
lineage: storage.RenewableCert) -> None:
"""Don't allow combining --reuse-key with any flags that would conflict
with key reuse (--key-type, --rsa-key-size, --elliptic-curve), unless
--new-key is also set.
"""
# If --no-reuse-key is set, no conflict
if cli.set_by_cli("reuse_key") and not config.reuse_key:
return
# If reuse_key is not set on the lineage and --reuse-key is not
# set on the CLI, no conflict.
if not lineage.reuse_key and not config.reuse_key:
return
# If --new-key is set, no conflict
if config.new_key:
return
kt = config.key_type.lower()
# The remaining cases where conflicts are present:
# - --key-type is set on the CLI and doesn't match the stored private key
# - It's an RSA key and --rsa-key-size is set and doesn't match
# - It's an ECDSA key and --elliptic-curve is set and doesn't match
potential_conflicts = [
("--key-type",
lambda: kt != lineage.private_key_type.lower()),
("--rsa-key-type",
lambda: kt == "rsa" and config.rsa_key_size != lineage.rsa_key_size),
("--elliptic-curve",
lambda: kt == "ecdsa" and lineage.elliptic_curve and \
config.elliptic_curve.lower() != lineage.elliptic_curve.lower())
]
for conflict in potential_conflicts:
if conflict[1]():
raise errors.Error(
f"Unable to change the {conflict[0]} of this certificate because --reuse-key "
"is set. To stop reusing the private key, specify --no-reuse-key. "
"To change the private key this one time and then reuse it in future, "
"add --new-key.")
def renew_cert(config: configuration.NamespaceConfig, domains: Optional[List[str]],
le_client: client.Client, lineage: storage.RenewableCert) -> None:
"""Renew a certificate lineage."""
renewal_params = lineage.configuration["renewalparams"]
original_server = renewal_params.get("server", cli.flag_default("server"))
_avoid_invalidating_lineage(config, lineage, original_server)
_avoid_reuse_key_conflicts(config, lineage)
if not domains:
domains = lineage.names()
# The private key is the existing lineage private key if reuse_key is set.
@@ -460,9 +503,6 @@ def handle_renewal_request(config: configuration.NamespaceConfig) -> None:
if not renewal_candidate:
parse_failures.append(renewal_file)
else:
# This call is done only for retro-compatibility purposes.
# TODO: Remove this call once zope dependencies are removed from Certbot.
zope.component.provideUtility(lineage_config, interfaces.IConfig)
renewal_candidate.ensure_deployed()
from certbot._internal import main
plugins = plugins_disco.PluginsRegistry.find_all()
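The new _avoid_reuse_key_conflicts helper above pairs each CLI flag with a predicate and raises on the first one that fires. The same table-of-checks pattern, reduced to a self-contained sketch with hypothetical Config and Lineage placeholders:

from dataclasses import dataclass

@dataclass
class Config:    # hypothetical stand-in for the parsed CLI configuration
    key_type: str = "rsa"
    rsa_key_size: int = 2048

@dataclass
class Lineage:   # hypothetical stand-in for the stored certificate lineage
    private_key_type: str = "RSA"
    rsa_key_size: int = 2048

def check_conflicts(config: Config, lineage: Lineage) -> None:
    kt = config.key_type.lower()
    potential_conflicts = [
        ("--key-type", lambda: kt != lineage.private_key_type.lower()),
        ("--rsa-key-size", lambda: kt == "rsa" and config.rsa_key_size != lineage.rsa_key_size),
    ]
    for flag, conflicts in potential_conflicts:
        if conflicts():
            raise ValueError(f"Cannot change {flag} while the private key is being reused")

check_conflicts(Config(), Lineage())                   # no conflict, returns quietly
check_conflicts(Config(), Lineage(rsa_key_size=4096))  # raises ValueError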

View File

@@ -1,94 +0,0 @@
"""Collects and displays information to the user."""
import collections
import logging
import queue
import sys
import textwrap
from certbot import configuration
from certbot import util
logger = logging.getLogger(__name__)
class Reporter:
"""Collects and displays information to the user.
:ivar `queue.PriorityQueue` messages: Messages to be displayed to
the user.
"""
HIGH_PRIORITY = 0
"""High priority constant. See `add_message`."""
MEDIUM_PRIORITY = 1
"""Medium priority constant. See `add_message`."""
LOW_PRIORITY = 2
"""Low priority constant. See `add_message`."""
_msg_type = collections.namedtuple('_msg_type', 'priority text on_crash')
def __init__(self, config: configuration.NamespaceConfig) -> None:
self.messages: "queue.PriorityQueue[Reporter._msg_type]" = queue.PriorityQueue()
self.config = config
def add_message(self, msg: str, priority: int, on_crash: bool = True) -> None:
"""Adds msg to the list of messages to be printed.
:param str msg: Message to be displayed to the user.
:param int priority: One of `HIGH_PRIORITY`, `MEDIUM_PRIORITY`,
or `LOW_PRIORITY`.
:param bool on_crash: Whether or not the message should be
printed if the program exits abnormally.
"""
assert self.HIGH_PRIORITY <= priority <= self.LOW_PRIORITY
self.messages.put(self._msg_type(priority, msg, on_crash))
logger.debug("Reporting to user: %s", msg)
def print_messages(self) -> None:
"""Prints messages to the user and clears the message queue.
If there is an unhandled exception, only messages for which
``on_crash`` is ``True`` are printed.
"""
bold_on = False
if not self.messages.empty():
no_exception = sys.exc_info()[0] is None
bold_on = sys.stdout.isatty()
if not self.config.quiet:
if bold_on:
print(util.ANSI_SGR_BOLD)
print('IMPORTANT NOTES:')
first_wrapper = textwrap.TextWrapper(
initial_indent=' - ',
subsequent_indent=(' ' * 3),
break_long_words=False,
break_on_hyphens=False)
next_wrapper = textwrap.TextWrapper(
initial_indent=first_wrapper.subsequent_indent,
subsequent_indent=first_wrapper.subsequent_indent,
break_long_words=False,
break_on_hyphens=False)
while not self.messages.empty():
msg = self.messages.get()
if self.config.quiet:
# In --quiet mode, we only print high priority messages that
# are flagged for crash cases
if not (msg.priority == self.HIGH_PRIORITY and msg.on_crash):
continue
if no_exception or msg.on_crash:
if bold_on and msg.priority > self.HIGH_PRIORITY:
if not self.config.quiet:
sys.stdout.write(util.ANSI_SGR_RESET)
bold_on = False
lines = msg.text.splitlines()
print(first_wrapper.fill(lines[0]))
if len(lines) > 1:
print("\n".join(
next_wrapper.fill(line) for line in lines[1:]))
if bold_on and not self.config.quiet:
sys.stdout.write(util.ANSI_SGR_RESET)
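For context on the deleted reporter module above: it ordered output with a queue.PriorityQueue of namedtuples so that high-priority notes drained first. The core idea, reduced to the standard library (message strings here are only examples):

import collections
import queue

Message = collections.namedtuple("Message", "priority text on_crash")
HIGH, MEDIUM, LOW = 0, 1, 2

messages: "queue.PriorityQueue[Message]" = queue.PriorityQueue()
messages.put(Message(LOW, "Consider supporting the project", on_crash=False))
messages.put(Message(HIGH, "Certificate saved successfully", on_crash=True))

while not messages.empty():
    msg = messages.get()          # drains in priority order: HIGH before LOW
    print(f" - {msg.text}")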

View File

@@ -12,10 +12,12 @@ from typing import List
from typing import Mapping
from typing import Optional
from typing import Tuple
from typing import Union
import configobj
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey
from cryptography.hazmat.primitives.serialization import load_pem_private_key
import parsedatetime
import pkg_resources
@@ -569,6 +571,12 @@ class RenewableCert(interfaces.RenewableCert):
return util.is_staging(self.server)
return False
@property
def reuse_key(self) -> bool:
"""Returns whether this certificate is configured to reuse its private key"""
return "reuse_key" in self.configuration["renewalparams"] and \
self.configuration["renewalparams"].as_bool("reuse_key")
def _check_symlinks(self) -> None:
"""Raises an exception if a symlink doesn't exist"""
for kind in ALL_FOUR:
@@ -1115,22 +1123,47 @@ class RenewableCert(interfaces.RenewableCert):
target, values)
return cls(new_config.filename, cli_config)
@property
def private_key_type(self) -> str:
"""
:returns: The type of algorithm for the private key, RSA or ECDSA
:rtype: str
"""
def _private_key(self) -> Union[RSAPrivateKey, EllipticCurvePrivateKey]:
with open(self.configuration["privkey"], "rb") as priv_key_file:
key = load_pem_private_key(
data=priv_key_file.read(),
password=None,
backend=default_backend()
)
return key
@property
def private_key_type(self) -> str:
"""
:returns: The type of algorithm for the private key, RSA or ECDSA
:rtype: str
"""
key = self._private_key()
if isinstance(key, RSAPrivateKey):
return "RSA"
else:
return "ECDSA"
return "ECDSA"
@property
def rsa_key_size(self) -> Optional[int]:
"""
:returns: If the private key is an RSA key, its size.
:rtype: int
"""
key = self._private_key()
if isinstance(key, RSAPrivateKey):
return key.key_size
return None
@property
def elliptic_curve(self) -> Optional[str]:
"""
:returns: If the private key is an elliptic key, the name of its curve.
:rtype: str
"""
key = self._private_key()
if isinstance(key, EllipticCurvePrivateKey):
return key.curve.name
return None
def save_successor(self, prior_version: int, new_cert: bytes, new_privkey: bytes,
new_chain: bytes, cli_config: configuration.NamespaceConfig) -> int:

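The new private_key_type, rsa_key_size, and elliptic_curve properties above all rely on loading the PEM key with the cryptography package and branching on the key class. A standalone sketch of the same inspection, generating a key in memory instead of reading the lineage's privkey file (assumes a recent cryptography release where generate_private_key needs no explicit backend):

from cryptography.hazmat.primitives.asymmetric import ec, rsa

def describe(key):
    """Return (type, rsa_size, curve_name) for an in-memory private key."""
    if isinstance(key, rsa.RSAPrivateKey):
        return "RSA", key.key_size, None
    if isinstance(key, ec.EllipticCurvePrivateKey):
        return "ECDSA", None, key.curve.name
    return "unknown", None, None

key = ec.generate_private_key(ec.SECP256R1())
print(describe(key))  # ('ECDSA', None, 'secp256r1')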
View File

@@ -47,7 +47,7 @@ class AnnotatedChallenge(jose.ImmutableMap):
class KeyAuthorizationAnnotatedChallenge(AnnotatedChallenge):
"""Client annotated `KeyAuthorizationChallenge` challenge."""
__slots__ = ('challb', 'domain', 'account_key')
__slots__ = ('challb', 'domain', 'account_key') # pylint: disable=redefined-slots-in-subclass
def response_and_validation(self, *args: Any, **kwargs: Any) -> Any:
"""Generate response and validation."""
@@ -57,5 +57,5 @@ class KeyAuthorizationAnnotatedChallenge(AnnotatedChallenge):
class DNS(AnnotatedChallenge):
"""Client annotated "dns" ACME challenge."""
__slots__ = ('challb', 'domain')
__slots__ = ('challb', 'domain') # pylint: disable=redefined-slots-in-subclass
acme_type = challenges.DNS

View File

@@ -10,7 +10,6 @@ import subprocess
import sys
from typing import Optional
from typing import Tuple
import warnings
from certbot import errors
from certbot.compat import os
@@ -144,8 +143,8 @@ def execute_command_status(cmd_name: str, shell_cmd: str,
subprocess.run(shell=True)
- on Windows command will be run in a Powershell shell
This differs from execute_command: it returns the exit code, and does not log the result
and output of the command.
This function returns the exit code, and does not log the result and output
of the command.
:param str cmd_name: the user facing name of the hook being run
:param str shell_cmd: shell command to execute
@@ -168,36 +167,3 @@ def execute_command_status(cmd_name: str, shell_cmd: str,
# bytes in Python 3
out, err = proc.stdout, proc.stderr
return proc.returncode, err, out
def execute_command(cmd_name: str, shell_cmd: str, env: Optional[dict] = None) -> Tuple[str, str]:
"""
Run a command:
- on Linux command will be run by the standard shell selected with
subprocess.run(shell=True)
- on Windows command will be run in a Powershell shell
This differs from execute_command: it returns the exit code, and does not log the result
and output of the command.
:param str cmd_name: the user facing name of the hook being run
:param str shell_cmd: shell command to execute
:param dict env: environ to pass into subprocess.run
:returns: `tuple` (`str` stderr, `str` stdout)
"""
# Deprecation per https://github.com/certbot/certbot/issues/8854
warnings.warn(
"execute_command will be deprecated in the future, use execute_command_status instead",
PendingDeprecationWarning
)
returncode, err, out = execute_command_status(cmd_name, shell_cmd, env)
base_cmd = os.path.basename(shell_cmd.split(None, 1)[0])
if out:
logger.info('Output from %s command %s:\n%s', cmd_name, base_cmd, out)
if returncode != 0:
logger.error('%s command "%s" returned error code %d',
cmd_name, shell_cmd, returncode)
if err:
logger.error('Error output from %s command %s:\n%s', cmd_name, base_cmd, err)
return err, out
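With execute_command removed, its logging moves to the caller of execute_command_status, which only returns the exit code and the captured streams. A minimal caller reproducing the old behaviour, assuming the helper is importable from certbot.util (the module changed in this hunk):

import logging

from certbot.util import execute_command_status

logger = logging.getLogger(__name__)

def run_hook(name: str, shell_cmd: str) -> None:
    returncode, err, out = execute_command_status(name, shell_cmd)
    if out:
        logger.info("Output from %s: %s", name, out)
    if returncode != 0:
        logger.error("%s failed with exit code %d: %s", name, returncode, err)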

View File

@@ -170,7 +170,11 @@ class NamespaceConfig:
@property
def no_verify_ssl(self) -> bool:
"""Disable verification of the ACME server's certificate."""
"""Disable verification of the ACME server's certificate.
The root certificates trusted by Certbot can be overridden by setting the
REQUESTS_CA_BUNDLE environment variable.
"""
return self.namespace.no_verify_ssl
@property

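The expanded docstring points at the REQUESTS_CA_BUNDLE override. One way to exercise it from Python before any requests-based calls are made (the bundle path below is a placeholder):

import os

# requests reads this variable at request time, so set it before the HTTP call is made.
os.environ["REQUESTS_CA_BUNDLE"] = "/path/to/custom-ca-bundle.pem"  # placeholder path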
View File

@@ -15,7 +15,6 @@ from typing import Set
from typing import Tuple
from typing import TYPE_CHECKING
from typing import Union
import warnings
from cryptography import x509
from cryptography.exceptions import InvalidSignature
@@ -35,7 +34,6 @@ import josepy
from OpenSSL import crypto
from OpenSSL import SSL
import pyrfc3339
import zope.component
from acme import crypto_util as acme_crypto_util
from certbot import errors
@@ -100,41 +98,6 @@ def generate_key(key_size: int, key_dir: str, key_type: str = "rsa",
return util.Key(key_path, key_pem)
# TODO: Remove this call once zope dependencies are removed from Certbot.
def init_save_key(key_size: int, key_dir: str, key_type: str = "rsa",
elliptic_curve: str = "secp256r1",
keyname: str = "key-certbot.pem") -> util.Key:
"""Initializes and saves a privkey.
Inits key and saves it in PEM format on the filesystem.
.. note:: keyname is the attempted filename, it may be different if a file
already exists at the path.
.. deprecated:: 1.16.0
Use :func:`generate_key` instead.
:param int key_size: key size in bits if key size is rsa.
:param str key_dir: Key save directory.
:param str key_type: Key Type [rsa, ecdsa]
:param str elliptic_curve: Name of the elliptic curve if key type is ecdsa.
:param str keyname: Filename of key
:returns: Key
:rtype: :class:`certbot.util.Key`
:raises ValueError: If unable to generate the key given key_size.
"""
warnings.warn("certbot.crypto_util.init_save_key is deprecated, please use "
"certbot.crypto_util.generate_key instead.", DeprecationWarning)
config = zope.component.getUtility(interfaces.IConfig)
return generate_key(key_size, key_dir, key_type=key_type, elliptic_curve=elliptic_curve,
keyname=keyname, strict_permissions=config.strict_permissions)
def generate_csr(privkey: util.Key, names: Union[List[str], Set[str]], path: str,
must_staple: bool = False, strict_permissions: bool = True) -> util.CSR:
"""Initialize a CSR with the given private key.
@@ -165,33 +128,6 @@ def generate_csr(privkey: util.Key, names: Union[List[str], Set[str]], path: str
return util.CSR(csr_filename, csr_pem, "pem")
# TODO: Remove this call once zope dependencies are removed from Certbot.
def init_save_csr(privkey: util.Key, names: Set[str], path: str) -> util.CSR:
"""Initialize a CSR with the given private key.
.. deprecated:: 1.16.0
Use :func:`generate_csr` instead.
:param privkey: Key to include in the CSR
:type privkey: :class:`certbot.util.Key`
:param set names: `str` names to include in the CSR
:param str path: Certificate save directory.
:returns: CSR
:rtype: :class:`certbot.util.CSR`
"""
warnings.warn("certbot.crypto_util.init_save_csr is deprecated, please use "
"certbot.crypto_util.generate_csr instead.", DeprecationWarning)
config = zope.component.getUtility(interfaces.IConfig)
return generate_csr(privkey, names, path, must_staple=config.must_staple,
strict_permissions=config.strict_permissions)
# WARNING: the csr and private key file are possible attack vectors for TOCTOU
# We should either...
# A. Do more checks to verify that the CSR is trusted/valid

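With the zope-backed init_save_key and init_save_csr wrappers removed, callers pass the settings those wrappers read from the global config directly to generate_key and generate_csr. A usage sketch based on the keyword arguments visible in the removed wrapper (the key directory is illustrative):

from certbot import crypto_util

key = crypto_util.generate_key(
    key_size=2048,
    key_dir="/tmp/certbot-keys",   # illustrative directory
    key_type="rsa",
    strict_permissions=False,      # previously read from the zope-provided config
)
print(key)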
View File

@@ -181,7 +181,10 @@ def _filter_names(names: Iterable[str],
if override_question:
question = override_question
else:
question = "Which names would you like to activate HTTPS for?"
question = (
"Which names would you like to activate HTTPS for?\n"
"We recommend selecting either all domains, or all domains in a VirtualHost/server "
"block.")
code, names = display_util.checklist(
question, tags=sorted_names, cli_flag="--domains", force_interactive=True)
return code, [str(s) for s in names]

View File

@@ -9,26 +9,12 @@ should be used whenever:
Other messages can use the `logging` module. See `log.py`.
"""
import sys
from types import ModuleType
from typing import Any
from typing import cast
from typing import List
from typing import Optional
from typing import Tuple
from typing import Union
import warnings
from certbot._internal.display import obj
# These specific imports from certbot._internal.display.obj and
# certbot._internal.display.util are done to not break the public API of this
# module.
from certbot._internal.display.obj import FileDisplay # pylint: disable=unused-import
from certbot._internal.display.obj import NoninteractiveDisplay # pylint: disable=unused-import
from certbot._internal.display.obj import SIDE_FRAME # pylint: disable=unused-import
from certbot._internal.display.util import input_with_timeout # pylint: disable=unused-import
from certbot._internal.display.util import separate_list_input # pylint: disable=unused-import
from certbot._internal.display.util import summarize_domain_list # pylint: disable=unused-import
# These constants are defined this way to make them easier to document with
# Sphinx and to not couple our public docstrings to our internal ones.
@@ -38,17 +24,8 @@ OK = obj.OK
CANCEL = obj.CANCEL
"""Display exit code for a user canceling the display."""
# These constants are unused and should be removed in a major release of
# Certbot.
WIDTH = 72
HELP = "help"
"""Display exit code when for when the user requests more help. (UNUSED)"""
ESC = "esc"
"""Display exit code when the user hits Escape (UNUSED)"""
def notify(msg: str) -> None:
"""Display a basic status message.
@@ -204,36 +181,3 @@ def assert_valid_call(prompt: str, default: str, cli_flag: str, force_interactiv
msg += ("\nYou can set an answer to "
"this prompt with the {0} flag".format(cli_flag))
assert default is not None or force_interactive, msg
# This class takes a similar approach to the cryptography project to deprecate attributes
# in public modules. See the _ModuleWithDeprecation class here:
# https://github.com/pyca/cryptography/blob/91105952739442a74582d3e62b3d2111365b0dc7/src/cryptography/utils.py#L129
class _DisplayUtilDeprecationModule:
"""
Internal class delegating to a module, and displaying warnings when deprecated
attributes of the certbot.display.util module are accessed.
"""
def __init__(self, module: ModuleType) -> None:
self.__dict__['_module'] = module
def __getattr__(self, attr: str) -> Any:
if attr in ('FileDisplay', 'NoninteractiveDisplay', 'SIDE_FRAME', 'input_with_timeout',
'separate_list_input', 'summarize_domain_list', 'WIDTH', 'HELP', 'ESC'):
warnings.warn('{0} attribute in certbot.display.util module is deprecated '
'and will be removed soon.'.format(attr),
DeprecationWarning, stacklevel=2)
return getattr(self._module, attr)
def __setattr__(self, attr: str, value: Any) -> None: # pragma: no cover
setattr(self._module, attr, value)
def __delattr__(self, attr: str) -> None: # pragma: no cover
delattr(self._module, attr)
def __dir__(self) -> List[str]: # pragma: no cover
return ['_module'] + dir(self._module)
# Patching ourselves to warn about deprecation and planned removal of some elements in the module.
sys.modules[__name__] = cast(ModuleType, _DisplayUtilDeprecationModule(sys.modules[__name__]))

View File

@@ -2,23 +2,17 @@
from abc import ABCMeta
from abc import abstractmethod
from argparse import ArgumentParser
import sys
from types import ModuleType
from typing import Any
from typing import Union
from typing import cast
from typing import Iterable
from typing import List
from typing import Optional
from typing import Type
from typing import TYPE_CHECKING
import warnings
import zope.interface
from typing import Union
from acme.challenges import Challenge
from acme.challenges import ChallengeResponse
from acme.client import ClientBase
from acme.client import ClientV2
from certbot import configuration
from certbot.achallenges import AnnotatedChallenge
@@ -53,7 +47,7 @@ class AccountStorage(metaclass=ABCMeta):
raise NotImplementedError()
@abstractmethod
def save(self, account: 'Account', client: ClientBase) -> None: # pragma: no cover
def save(self, account: 'Account', client: ClientV2) -> None: # pragma: no cover
"""Save account.
:raises .AccountStorageError: if account could not be saved
@@ -62,18 +56,6 @@ class AccountStorage(metaclass=ABCMeta):
raise NotImplementedError()
class IConfig(zope.interface.Interface): # pylint: disable=inherit-non-class
"""Deprecated, use certbot.configuration.NamespaceConfig instead."""
class IPluginFactory(zope.interface.Interface): # pylint: disable=inherit-non-class
"""Deprecated, use certbot.interfaces.Plugin as ABC instead."""
class IPlugin(zope.interface.Interface): # pylint: disable=inherit-non-class
"""Deprecated, use certbot.interfaces.Plugin as ABC instead."""
class Plugin(metaclass=ABCMeta):
"""Certbot plugin.
@@ -168,10 +150,6 @@ class Plugin(metaclass=ABCMeta):
"""
class IAuthenticator(IPlugin): # pylint: disable=inherit-non-class
"""Deprecated, use certbot.interfaces.Authenticator as ABC instead."""
class Authenticator(Plugin):
"""Generic Certbot Authenticator.
@@ -231,10 +209,6 @@ class Authenticator(Plugin):
"""
class IInstaller(IPlugin): # pylint: disable=inherit-non-class
"""Deprecated, use certbot.interfaces.Installer as ABC instead."""
class Installer(Plugin):
"""Generic Certbot Installer Interface.
@@ -362,14 +336,6 @@ class Installer(Plugin):
"""
class IDisplay(zope.interface.Interface): # pylint: disable=inherit-non-class
"""Deprecated, use your own Display implementation instead."""
class IReporter(zope.interface.Interface): # pylint: disable=inherit-non-class
"""Deprecated, use your own Reporter implementation instead."""
class RenewableCert(metaclass=ABCMeta):
"""Interface to a certificate lineage."""
@@ -499,36 +465,3 @@ class RenewDeployer(metaclass=ABCMeta):
:type lineage: RenewableCert
"""
# This class takes a similar approach to the cryptography project to deprecate attributes
# in public modules. See the _ModuleWithDeprecation class here:
# https://github.com/pyca/cryptography/blob/91105952739442a74582d3e62b3d2111365b0dc7/src/cryptography/utils.py#L129
class _ZopeInterfacesDeprecationModule:
"""
Internal class delegating to a module, and displaying warnings when
attributes related to Zope interfaces are accessed.
"""
def __init__(self, module: ModuleType) -> None:
self.__dict__['_module'] = module
def __getattr__(self, attr: str) -> None:
if attr in ('IConfig', 'IPlugin', 'IPluginFactory', 'IAuthenticator',
'IInstaller', 'IDisplay', 'IReporter'):
warnings.warn('{0} attribute in certbot.interfaces module is deprecated '
'and will be removed soon.'.format(attr),
DeprecationWarning, stacklevel=2)
return getattr(self._module, attr)
def __setattr__(self, attr: str, value: Any) -> None: # pragma: no cover
setattr(self._module, attr, value)
def __delattr__(self, attr: str) -> None: # pragma: no cover
delattr(self._module, attr)
def __dir__(self) -> List[str]: # pragma: no cover
return ['_module'] + dir(self._module)
# Patching ourselves to warn about Zope interfaces deprecation and planned removal.
sys.modules[__name__] = cast(ModuleType, _ZopeInterfacesDeprecationModule(sys.modules[__name__]))

View File

@@ -38,14 +38,12 @@ class _AuthenticatorCallableTestCase(Protocol):
See
https://docs.python.org/3/library/unittest.html#unittest.TestCase.assertTrue
"""
...
def assertEqual(self, *unused_args: Any) -> None:
"""
See
https://docs.python.org/3/library/unittest.html#unittest.TestCase.assertEqual
"""
...
class BaseAuthenticatorTest:

Some files were not shown because too many files have changed in this diff