Skip to content
Snippets Groups Projects
Commit c7f2235a authored by nimrod's avatar nimrod
Browse files

- Initial commit.

parents
No related branches found
No related tags found
No related merge requests found
~*
*~
*.sw[op]
*.py[cod]
.DS_Store
__pycache__/
.vagrant/
vendor/
Thumbs.db
*.retry
.svn/
.sass-cache/
*.log
a.out
node-modules/
nbproject/
*.ipynb
.idea/
*.egg-info/
*.o
.classpath
.cache/
bower_components/
*.class
*.jar
secring.*
.*.kate-swp
.swp.*
.directory
.Trash-*
build/
dist/
MIT License
Copyright (c) 2016 Adar Nimrod
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
recursive-include check_s3_bucket *.py
exclude .pre-commit-config.yaml
include *.rst
include VERSION
include *.txt
check_s3_bucket
###############
Check that a filename matching the regex was added to the bucket in the given time window.
Usage
-----
.. code:: shell
$ check_s3_bucket --help
usage: check_s3_bucket [-h]
bucket [regex] [warning_threshold] [critical_threshold]
Check that a filename matching the regex was added to the bucket in the given
time window.
positional arguments:
bucket S3 bucket to check
regex Filename regex to check (defaults to *)
warning_threshold    Warning threshold in hours (defaults to 24)
critical_threshold   Critical threshold in hours (defaults to 48)
optional arguments:
-h, --help show this help message and exit
License
-------
This software is licensed under the MIT license (see the :code:`LICENSE.txt`
file).
Author
------
Nimrod Adar, `contact me <nimrod@shore.co.il>`_ or visit my `website
<https://www.shore.co.il/>`_. Patches are welcome via `git send-email
<http://git-scm.com/book/en/v2/Git-Commands-Email>`_. The repository is located
at: https://www.shore.co.il/git/.
0.1.0
#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import datetime
import re
import argparse
try:
import botocore.session
except ImportError:
print('Failed to import botocore.')
exit(3)
try:
from dateutil.tz import tzlocal
except ImportError:
print('Failed to import dateutil.')
exit(3)
def getFileList(bucket):
    '''Return the list of objects in the given S3 bucket.

    Uses the default botocore credential chain. Each returned item is an
    object dict containing at least 'Key' and 'LastModified'.
    '''
    session = botocore.session.get_session()
    s3client = session.create_client('s3')
    # I'm not concerned with the limitation on the number of keys in the
    # response as the buckets have a lifecycle rule enabled and files are
    # automatically moved out of the bucket.
    response = s3client.list_objects(Bucket=bucket)
    # botocore omits the 'Contents' key entirely when the bucket is
    # empty; return an empty list instead of raising KeyError.
    return response.get('Contents', [])
def main():
    '''Entry point: parse arguments, inspect the bucket, and exit with a
    Nagios-style status code (0 OK, 1 warning, 2 critical, 3 unknown).'''
    parser = argparse.ArgumentParser(
        description='''Check that a filename matching the regex was added to the
bucket in the given time window.''')
    parser.add_argument('bucket', help='S3 bucket to check')
    parser.add_argument('regex',
                        help='Filename regex to check (defaults to *)',
                        nargs='?',
                        default='*')
    # Help text now matches the actual defaults (24/48); it previously
    # claimed 25/49.
    parser.add_argument('warning_threshold',
                        help='Warning threshold in hours (defaults to 24)',
                        default=24,
                        type=int,
                        nargs='?')
    parser.add_argument('critical_threshold',
                        help='Critical threshold in hours (defaults to 48)',
                        default=48,
                        type=int,
                        nargs='?')
    args = parser.parse_args()
    try:
        filelist = getFileList(args.bucket)
    except Exception:
        # Narrowed from BaseException so SystemExit/KeyboardInterrupt
        # are not swallowed; the bound exception was never used.
        print('Failed to list file in bucket.')
        exit(3)
    if args.regex != '*':
        p = re.compile(args.regex)
        # List comprehension instead of filter(): on Python 3 filter()
        # returns a lazy iterator, which would break len() below.
        filelist = [x for x in filelist if p.search(x['Key']) is not None]
    if len(filelist) == 0:
        print('No files matching "{}" found in {}.'.format(args.regex,
                                                           args.bucket))
        exit(1)
    now = datetime.datetime.now(tz=tzlocal())
    # Age in whole hours of the most recently modified matching object.
    # min() over a generator replaces the original map()/.sort() pair,
    # which broke on Python 3 (map objects have no .sort()).
    delta = min(
        int((now - x['LastModified']).total_seconds() / 3600)
        for x in filelist)
    if delta >= args.critical_threshold:
        print('Last file modified is older than {} hours.'.format(
            args.critical_threshold))
        exit(2)
    elif delta >= args.warning_threshold:
        print('Last file modified is older than {} hours.'.format(
            args.warning_threshold))
        exit(1)
    else:
        print('Last file modified is newer than {} hours.'.format(
            args.warning_threshold))
        exit(0)
if __name__ == '__main__':
main()
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from fabric.api import (local, task, sudo, env, settings)
env.use_ssh_config = True
@task
def build():
    '''Build source and wheel distributions into dist/.'''
    local('python setup.py sdist bdist_wheel')
@task
def clean():
    '''Remove build artifacts and Python bytecode caches.'''
    for command in ('rm -rf *.egg-info build dist',
                    'find -name *.pyc -delete',
                    'find -name __pycache__ -type d -delete'):
        local(command)
@task
def upload():
    '''Build the distributions, then upload them (signed) to PyPI.'''
    build()
    local('twine upload -s dist/*')
[bdist_wheel]
universal=1
[flake8]
exclude = .tox,*.egg,build,data
select = E,W,F
setup.py 0 → 100644
#!/usr/bin/env python
'''Packaging configuration for check_s3_bucket.'''
from setuptools import setup, find_packages

# Read the metadata files up front inside context managers so the file
# handles are closed promptly (the original left both files open).
with open('VERSION', 'r') as version_file:
    version = version_file.read()
with open('README.rst', 'r') as readme_file:
    long_description = readme_file.read()

setup(name='check_s3_bucket',
      version=version,
      description='''Check that a filename matching the regex was added to the
bucket in the given time window.''',
      long_description=long_description,
      url='https://www.shore.co.il/git/check_s3_bucket',
      author='Nimrod Adar',
      author_email='nimrod@shore.co.il',
      license='MIT',
      classifiers=[
          'Development Status :: 4 - Beta',
          'Intended Audience :: System Administrators',
          'Programming Language :: Python :: 3',
          'Programming Language :: Python :: 2', 'Topic :: Utilities',
          'License :: OSI Approved :: MIT License'
      ],
      keywords='nagios s3 aws monitoring',
      packages=find_packages(),
      install_requires=['python-dateutil', 'botocore'],
      entry_points={
          'console_scripts': [
              'check_s3_bucket=check_s3_bucket:main'
          ],
      }, )
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment