Commit b47064e2 by Karel Meriste

add project

parents
Showing with 2421 additions and 0 deletions

Too many changes to show.

To preserve performance only 1000 of 1000+ files are displayed.

# Created by .ignore support plugin (hsz.mobi)
### Python template
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*,cover
.hypothesis/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# IPython Notebook
.ipynb_checkpoints
# pyenv
.python-version
# celery beat schedule file
celerybeat-schedule
# dotenv
.env
# virtualenv
venv/
ENV/
# Spyder project settings
.spyderproject
# Rope project settings
.ropeproject
### VirtualEnv template
# Virtualenv
# http://iamzed.com/2009/05/07/a-primer-on-virtualenv/
.Python
[Bb]in
[Ii]nclude
[Ll]ib
[Ll]ib64
[Ll]ocal
[Ss]cripts
pyvenv.cfg
.venv
pip-selfcheck.json
### JetBrains template
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff:
.idea/workspace.xml
.idea/tasks.xml
.idea/dictionaries
.idea/vcs.xml
.idea/jsLibraryMappings.xml
# Sensitive or high-churn files:
.idea/dataSources.ids
.idea/dataSources.xml
.idea/dataSources.local.xml
.idea/sqlDataSources.xml
.idea/dynamic.xml
.idea/uiDesigner.xml
# Gradle:
.idea/gradle.xml
.idea/libraries
# Mongo Explorer plugin:
.idea/mongoSettings.xml
.idea/
## File-based project format:
*.iws
## Plugin-specific files:
# IntelliJ
/out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/venv" />
</content>
<orderEntry type="jdk" jdkName="Python 2.7 (datalogger venv)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" name="R Skeletons" level="application" />
<orderEntry type="library" name="R User Library" level="project" />
</component>
<component name="PyDocumentationSettings">
<option name="renderExternalDocumentation" value="true" />
</component>
<component name="TestRunnerService">
<option name="PROJECT_TEST_RUNNER" value="Unittests" />
</component>
</module>
\ No newline at end of file
<component name="libraryTable">
<library name="R User Library">
<CLASSES />
<SOURCES />
</library>
</component>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="JavaScriptSettings">
<option name="languageLevel" value="ES6" />
</component>
<component name="ProjectRootManager" version="2" project-jdk-name="Python 2.7 (datalogger venv)" project-jdk-type="Python SDK" />
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/datalogger.iml" filepath="$PROJECT_DIR$/.idea/datalogger.iml" />
</modules>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="PySciProjectComponent">
<option name="PY_SCI_VIEW" value="true" />
<option name="PY_SCI_VIEW_SUGGESTED" value="true" />
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/venv" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="PyDocumentationSettings">
<option name="renderExternalDocumentation" value="true" />
</component>
<component name="TestRunnerService">
<option name="projectConfiguration" value="Nosetests" />
<option name="PROJECT_TEST_RUNNER" value="Nosetests" />
</component>
</module>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="JavaScriptSettings">
<option name="languageLevel" value="ES6" />
</component>
<component name="ProjectRootManager" version="2" project-jdk-name="Python 2.7 (filter)" project-jdk-type="Python SDK" />
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/filter.iml" filepath="$PROJECT_DIR$/.idea/filter.iml" />
</modules>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="PySciProjectComponent">
<option name="PY_SCI_VIEW" value="true" />
<option name="PY_SCI_VIEW_SUGGESTED" value="true" />
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>
</project>
\ No newline at end of file
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly
deactivate () {
unset -f pydoc >/dev/null 2>&1
# reset old environment variables
# ! [ -z ${VAR+_} ] returns true if VAR is declared at all
if ! [ -z "${_OLD_VIRTUAL_PATH+_}" ] ; then
PATH="$_OLD_VIRTUAL_PATH"
export PATH
unset _OLD_VIRTUAL_PATH
fi
if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
export PYTHONHOME
unset _OLD_VIRTUAL_PYTHONHOME
fi
# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
hash -r 2>/dev/null
fi
if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
PS1="$_OLD_VIRTUAL_PS1"
export PS1
unset _OLD_VIRTUAL_PS1
fi
unset VIRTUAL_ENV
if [ ! "${1-}" = "nondestructive" ] ; then
# Self destruct!
unset -f deactivate
fi
}
# unset irrelevant variables
deactivate nondestructive
VIRTUAL_ENV="/home/iseauto/PycharmProjects/filter/venv"
export VIRTUAL_ENV
_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH
# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
_OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
unset PYTHONHOME
fi
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
_OLD_VIRTUAL_PS1="$PS1"
if [ "x" != x ] ; then
PS1="$PS1"
else
PS1="(`basename \"$VIRTUAL_ENV\"`) $PS1"
fi
export PS1
fi
# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc
pydoc () {
python -m pydoc "$@"
}
# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then
hash -r 2>/dev/null
fi
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate && unalias pydoc'
# Unset irrelevant variables.
deactivate nondestructive
setenv VIRTUAL_ENV "/home/iseauto/PycharmProjects/filter/venv"
set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/bin:$PATH"
if ("" != "") then
set env_name = ""
else
set env_name = `basename "$VIRTUAL_ENV"`
endif
# Could be in a non-interactive environment,
# in which case, $prompt is undefined and we wouldn't
# care about the prompt anyway.
if ( $?prompt ) then
set _OLD_VIRTUAL_PROMPT="$prompt"
set prompt = "[$env_name] $prompt"
endif
unset env_name
alias pydoc python -m pydoc
rehash
# This file must be used using `. bin/activate.fish` *within a running fish ( http://fishshell.com ) session*.
# Do not run it directly.
function deactivate -d 'Exit virtualenv mode and return to the normal environment.'
# reset old environment variables
if test -n "$_OLD_VIRTUAL_PATH"
set -gx PATH $_OLD_VIRTUAL_PATH
set -e _OLD_VIRTUAL_PATH
end
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
set -e _OLD_VIRTUAL_PYTHONHOME
end
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
# Set an empty local `$fish_function_path` to allow the removal of `fish_prompt` using `functions -e`.
set -l fish_function_path
# Erase virtualenv's `fish_prompt` and restore the original.
functions -e fish_prompt
functions -c _old_fish_prompt fish_prompt
functions -e _old_fish_prompt
set -e _OLD_FISH_PROMPT_OVERRIDE
end
set -e VIRTUAL_ENV
if test "$argv[1]" != 'nondestructive'
# Self-destruct!
functions -e pydoc
functions -e deactivate
end
end
# Unset irrelevant variables.
deactivate nondestructive
set -gx VIRTUAL_ENV "/home/iseauto/PycharmProjects/filter/venv"
set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/bin" $PATH
# Unset `$PYTHONHOME` if set.
if set -q PYTHONHOME
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
set -e PYTHONHOME
end
function pydoc
python -m pydoc $argv
end
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
# Copy the current `fish_prompt` function as `_old_fish_prompt`.
functions -c fish_prompt _old_fish_prompt
function fish_prompt
# Save the current $status, for fish_prompts that display it.
set -l old_status $status
# Prompt override provided?
# If not, just prepend the environment name.
if test -n ""
printf '%s%s' "" (set_color normal)
else
printf '%s(%s) ' (set_color normal) (basename "$VIRTUAL_ENV")
end
# Restore the original $status
echo "exit $old_status" | source
_old_fish_prompt
end
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
end
"""By using execfile(this_file, dict(__file__=this_file)) you will
activate this virtualenv environment.
This can be used when you must use an existing Python interpreter, not
the virtualenv bin/python
"""
try:
__file__
except NameError:
raise AssertionError(
"You must run this like execfile('path/to/activate_this.py', dict(__file__='path/to/activate_this.py'))")
import sys
import os
old_os_path = os.environ.get('PATH', '')
os.environ['PATH'] = os.path.dirname(os.path.abspath(__file__)) + os.pathsep + old_os_path
base = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if sys.platform == 'win32':
site_packages = os.path.join(base, 'Lib', 'site-packages')
else:
site_packages = os.path.join(base, 'lib', 'python%s' % sys.version[:3], 'site-packages')
prev_sys_path = list(sys.path)
import site
site.addsitedir(site_packages)
sys.real_prefix = sys.prefix
sys.prefix = base
# Move the added items to the front of the path:
new_sys_path = []
for item in list(sys.path):
if item not in prev_sys_path:
new_sys_path.append(item)
sys.path.remove(item)
sys.path[:0] = new_sys_path
import json
from elastic import post_doc
def extract_wheel_angle(message):
    """Build a JSON document describing the steering-wheel angle in a CAN message.

    :param message: CAN message object exposing ``data`` (hex string), ``id``,
        ``timestamp`` (hex string) and ``ros_timestamp`` attributes.
    :return: JSON-serialised ``Steering`` document (string).
    """
    # NOTE(review): data[0:1] / data[2:3] each take a single hex nibble (and
    # skip index 1), yet the formula below treats data1/data2 as full bytes
    # (0-255).  If message.data packs two-character bytes, these slices should
    # probably be data[0:2] and data[2:4] -- confirm against the logger format.
    data1 = int(message.data[0:1],16)
    data2 = int(message.data[2:3],16)
    # NOTE(review): assumes message.timestamp is a hex *string*; if the
    # producer already stores an int, int(..., 16) raises TypeError -- verify.
    timestamp = int(message.timestamp,16)
    # Two data bytes combined, offset by 4096, halved (half-degree units
    # presumed from the /2 scaling -- TODO confirm).
    st_wheel_angle = (data1 * 256 + data2 - 4096)/2
    steering = Steering(st_wheel_angle, message.id, message.ros_timestamp, timestamp)
    document = json.dumps(steering.__dict__)
    return document
    #post_doc(document,message.ros_timestamp)
class Steering:
    """Plain data holder serialised (via ``__dict__``) into the steering JSON document."""

    def __init__(self, wheel_angle, id, ros_timestamp, timestamp):
        # Steering wheel angle decoded from the CAN payload.
        self.wheel_angle = wheel_angle
        # CAN arbitration id of the source message.
        self.id = id
        # Timestamp assigned on the ROS side of the logger.
        self.ros_timestamp = ros_timestamp
        # Timestamp carried inside the CAN log entry itself.
        self.timestamp = timestamp
#!/home/iseauto/PycharmProjects/filter/venv/bin/python2.7
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
#!/home/iseauto/PycharmProjects/filter/venv/bin/python2.7
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
import json
from elasticsearch import Elasticsearch
import time

# Single shared client for the Elasticsearch node expected on localhost:9200.
es = Elasticsearch([{'host': 'localhost', 'port': 9200}])
# Report reachability once, at import time (Python 2 print statements).
if es.ping():
    print "Connection established to elasticsearch"
else:
    print "Elasticsearch client connection failed"
def delete_index(index_name):
    """Delete the given index if it exists and print the server response.

    :param index_name: name of the Elasticsearch index to drop.
    """
    if es.indices.exists(index_name):
        res = es.indices.delete(index=index_name)
        print(" response: '%s'" % (res))
def create_new_index(request_body, index_name):
    """Create a new index with the given settings and print the server response.

    :param request_body: index settings dict, e.g.::

        request_body = {
            "settings" : {
                "number_of_shards" : 2,
                "number_of_replicas" : 0
            }
        }

    :param index_name: name of the index to create.
    """
    res = es.indices.create(index=index_name, body=request_body)
    print " response: '%s'" % res
def post_doc(body):
    """Index a single 'metrics' document.

    NOTE(review): this writes to 'car_metrics' while the bulk actions and the
    mapping built in message.py target 'car_metric' (singular) -- confirm
    which index name is intended.
    """
    es.index(index='car_metrics', doc_type='metrics', body=body)
def get_doc(id):
    """Fetch and print the document with the given id from 'car_metrics'."""
    print(es.get(index='car_metrics', doc_type='metrics', id=id))
def post_bulk(bulk_data):
    """Bulk-index pre-built action/document pairs and print the time spent.

    :param bulk_data: alternating action and document dicts as accepted by
        ``es.bulk``; the actions may override the default index given here.
    """
    print("bulk indexing...")
    start_time = time.time()
    # NOTE(review): the response (res) is never inspected, so per-item
    # indexing failures go unnoticed.
    res = es.bulk(index="car_metrics", body=bulk_data, refresh=True)
    finish_time = time.time()
    print "time spent indexing: ", finish_time - start_time
def create_mapping(index, doc_type, body):
    """Install (or update) a field mapping for ``doc_type`` on ``index``.

    :param index: target index name.
    :param doc_type: document type the mapping applies to.
    :param body: mapping definition dict.
    """
    es.indices.put_mapping(
        index=index,
        doc_type=doc_type,
        body=body
    )
if __name__ == "__main__":
    # One-off setup: create the index that message.py's bulk actions target.
    # NOTE(review): name is 'car_metric' (singular) while post_doc/get_doc use
    # 'car_metrics' -- confirm the intended index name.
    create_new_index({
        "settings": {
            "number_of_shards": 2,
            "number_of_replicas": 0
        }
    }, "car_metric")
from message import parse_message
import binascii
from mongodb import close_mongo_connection

# NOTE(review): debug leftover -- prints the module name on every import.
print __name__


def jsonConver(address):
    """Read a binary CAN log file and hand its hexlified contents to the parser.

    :param address: filesystem path of the .log file to process.
    """
    with open(address, "rb") as binary_file:
        data = binary_file.read()
        # Parse the whole log as one big hex string.
        hex_data = binascii.hexlify(data)
        parse_message(hex_data)


# '__builtin__' covers execution via execfile() under Python 2.
if __name__ in ["__main__", "__builtin__"]:
    # Hard-coded developer-machine path; only used for manual runs.
    address = "/home/k2/development/Iseauto/datalogger/filter/venv/local_logs/201808241429.log"
    jsonConver(address)
    close_mongo_connection()
import time
import logging
import logFilter
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler

# Seconds to wait after a new .log file appears before parsing it, giving the
# vehicle logger time to finish writing the file.
TIMEOUT = 60 #time for generating log file
class MyHandler(FileSystemEventHandler):
def on_created(self, event):
if event.src_path.lower().endswith('.log'):
time.sleep(60)
print "new log ", event.src_path
logFilter.jsonConver(event.src_path)
if __name__ == "__main__":
    # Timestamped INFO logging for the observer's own messages.
    logging.basicConfig(level=logging.INFO,
                        format='%(asctime)s - %(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S')
    # Directory the vehicle logger writes its .log files into.
    path = "/home/iseauto/Desktop/vehicle_listener_log"
    event_handler = MyHandler()
    observer = Observer()
    observer.schedule(event_handler, path, recursive=True)
    observer.start()
    try:
        # Idle until interrupted; the observer thread does the actual work.
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()
from mongodb import post_message
import time
import json
from elastic import post_bulk
from elastic import create_mapping

# All *_LENGTH constants below are measured in hex characters (two per byte),
# because the log is parsed from a hexlified string -- see logFilter.jsonConver.
CAN_ID_LENGTH = 4
CAN_TIMESTAMP_LENGTH = 12
CAN_DATA_LENGTH = 16
ROS_TIMESTAMP_LENGTH = 12
UDP_VERSION_LENGTH = 2
UDP_TYPE_LENGTH = 2
UDP_MSG_LENGTH = 4
# Protocol version *value* (not a length) expected in the UDP header.
UDP_VERSION_2 = 0x02
UDP_DATA_INFO_LENGTH = 4
# NOTE(review): 'LENGHT' typo kept as-is; extract_endbyte uses a literal 2
# instead of this constant.
UDP_ENDBYTE_LENGHT = 2
CAN_DATAFIELD_LENGTH = 2
# Number of CAN messages packed into each UDP frame of the log.
MSG_COUNT = 22
# Arbitration id of the steering-angle message (see steering.py).
CAN_ID_STEERING = 0x236
class CanMessage:
    """In-memory representation of one CAN frame parsed from the log."""

    def __init__(self, timestamp, can_id):
        # Timestamp carried inside the CAN log entry.
        self.timestamp = timestamp
        # CAN arbitration id.
        self.id = can_id
        # Data fields hold empty-list placeholders until add_data() fills
        # them with two-character hex strings.
        self.data1 = []
        self.data2 = []
        self.data3 = []
        self.data4 = []
        self.data5 = []
        self.data6 = []
        self.data7 = []
        self.data8 = []
        # Placeholder until add_ros_timestamp() is called.
        self.ros_timestamp = []

    def add_data(self, D1,D2,D3,D4,D5,D6,D7,D8):
        # Store the eight raw data fields (two-character hex strings).
        self.data1 = D1
        self.data2 = D2
        self.data3 = D3
        self.data4 = D4
        self.data5 = D5
        self.data6 = D6
        self.data7 = D7
        self.data8 = D8

    def add_ros_timestamp(self, ros_timestamp):
        # Attach the ROS-side timestamp of the enclosing UDP frame.
        self.ros_timestamp = ros_timestamp
def extract_timestamp(data, p):
    """
    :param data: log file to be parsed (hexlified string)
    :param p: pointer to the timestamp in log file
    :return: (ROS timestamp from log file as int, pointer advanced past the field)
    """
    timestamp = data[p:p + ROS_TIMESTAMP_LENGTH]
    timestamp = to_small_endian(timestamp)
    # NOTE(review): only 12 hex chars are read but the pointer advances 16 --
    # presumably 4 chars of padding follow the timestamp; confirm against the
    # log format.
    p = p + 16
    timestamp = int(timestamp, 16)
    return timestamp, p
def extract_message(data, p):
    """
    :param data: log file to be parsed (hexlified string)
    :param p: pointer to the beginning of CAN message in log file
    :return: can message id, can message timestamp, can message data, pointer to the end of can message in log
    """
    message_id = data[p:p + CAN_ID_LENGTH]
    message_id = to_small_endian(message_id)
    # Keep only the low three hex digits of the id -- the first nibble is
    # discarded (presumably padding above the 11-bit CAN id; confirm).
    message_id = message_id[1:4]
    message_id = int(message_id, 16)
    p = p + CAN_ID_LENGTH
    timestamp_low = data[p:p + 4]  # sizeof uint16_t
    p = p + 4
    timestamp_low = to_small_endian(timestamp_low)
    timestamp_low = timestamp_low[1:4]  # ignore the first nibble
    timestamp_low = int(timestamp_low, 16)
    timestamp_high = data[p:p + 8]  # sizeof uint32_t
    p = p + 8
    timestamp_high = to_small_endian(timestamp_high)
    timestamp_high = int(timestamp_high, 16)
    # Recombine the split timestamp; the *1000 scaling suggests the high word
    # is seconds and the low word sub-second ticks -- TODO confirm units.
    message_timestamp = timestamp_high * 1000 + timestamp_low
    D1, D2, D3, D4, D5, D6, D7, D8, p = extract_can_data(data,p)
    return message_id, message_timestamp, D1, D2, D3, D4, D5, D6, D7, D8, p
def extract_can_data(data, p):
    """Slice the eight fixed-width CAN data fields out of the hex log stream.

    :param data: hexlified log contents.
    :param p: index of the first data field.
    :return: the eight data fields D1..D8 (each CAN_DATAFIELD_LENGTH hex
        characters) followed by the pointer advanced past them.
    """
    # Collect the eight fields in one pass instead of eight copy-pasted
    # stanzas; behavior and return shape are unchanged.
    fields = []
    for _ in range(8):
        fields.append(data[p:p + CAN_DATAFIELD_LENGTH])
        p = p + CAN_DATAFIELD_LENGTH
    D1, D2, D3, D4, D5, D6, D7, D8 = fields
    return D1, D2, D3, D4, D5, D6, D7, D8, p
def to_small_endian(data):
    """Reverse the byte order of a hexadecimal string.

    :param data: big endian hex string
    :return: small endian hex string
    """
    # Emit the two-character byte chunks from the end toward the start.
    return ''.join(data[i:i + 2] for i in reversed(range(0, len(data), 2)))
def hex_to_int(hex_str):
    """Parse a hexadecimal string into an int."""
    return int(hex_str, 16)
def extract_data_header(data, p):
    """Parse the UDP frame header that precedes each batch of CAN messages.

    :param data: hexlified log contents.
    :param p: index of the header start.
    :return: (udp_version, udp_type, udp_data_length, advanced pointer)
    """
    udp_version = data[p:p + UDP_VERSION_LENGTH]
    udp_version = to_small_endian(udp_version)
    udp_version = hex_to_int(udp_version)
    p = p + UDP_VERSION_LENGTH
    # Only version-2 frames carry the type and data-length fields.
    if udp_version == UDP_VERSION_2:
        udp_type = data[p:p + UDP_TYPE_LENGTH]
        udp_type = to_small_endian(udp_type)
        udp_type = hex_to_int(udp_type)
        p = p + UDP_TYPE_LENGTH
        udp_data_length = data[p:p + UDP_MSG_LENGTH]
        udp_data_length = to_small_endian(udp_data_length)
        udp_data_length = hex_to_int(udp_data_length)
        p = p + UDP_MSG_LENGTH
    else:
        # Unknown version: report zeros and leave the pointer just past the
        # version field.
        udp_type = 0
        udp_data_length = 0
    return udp_version, udp_type, udp_data_length, p
def extract_endbyte(p, data):
    """Read the two-character end byte at position ``p`` in the hex stream.

    :param p: index of the end byte.
    :param data: hexlified log contents.
    :return: (end byte as a two-character string, pointer advanced past it)
    """
    # Note the (p, data) argument order -- reversed relative to the other
    # extract_* helpers in this module.
    chunk = data[p:p + 2]
    return chunk, p + 2
def put_mapping():
    """Install the field mapping for the 'car_metric' index.

    Declares both timestamps as epoch-millisecond dates and the id plus the
    eight data fields as longs.

    NOTE(review): parse_message stores D1..D8 as hex *strings*, which
    conflicts with the 'long' type declared here -- confirm the intended
    document shape.
    """
    index = "car_metric"
    doc_type = "metrics"
    body = {
        "properties": {
            "@timestamp": {"type": "date",
                           "format": "epoch_millis"},
            "D1": {"type": "long"},
            "D2": {"type": "long"},
            "D3": {"type": "long"},
            "D4": {"type": "long"},
            "D5": {"type": "long"},
            "D6": {"type": "long"},
            "D7": {"type": "long"},
            "D8": {"type": "long"},
            "id": {"type": "long"},
            "ros_timestamp": {"type": "date",
                              "format": "epoch_millis"}
        }
    }
    create_mapping(index, doc_type, body)
def parse_message(hex_data):
    """
    Parses given log and extracts vehicle parameters. Sends can messages contained in the log to MongoDb as separate
    documents and sends known vehicle metrics to be indexed in elasticsearch
    :param hex_data: log data as string in hexadecimal format
    """
    p = 0
    # Alternating action/document payload for the elasticsearch bulk API.
    elastic_json = []
    size = len(hex_data)
    # Bulk action prepended to every document (index into 'car_metric').
    op_dict = {
        "index": {
            "_index": "car_metric",
            "_type": "metrics",
        }
    }
    time1 = time.time()
    print "parsing log..."
    # Documents destined for MongoDB.
    can_message_list = []
    # Latest CanMessage seen per arbitration id, used below only to compare
    # timestamps between consecutive occurrences of the same id.
    can_messages = {}
    # Each iteration consumes one UDP frame: ROS timestamp, header, then
    # MSG_COUNT CAN messages, then an end byte.
    while p < size:
        ros_timestamp, p = extract_timestamp(hex_data, p)
        udp_version, udp_type, udp_data_length, p = extract_data_header(hex_data, p)
        i = 0
        while i < MSG_COUNT:
            message_id, message_timestamp, D1, D2, D3, D4, D5, D6, D7, D8, p = extract_message(hex_data, p)
            message = CanMessage(message_timestamp, message_id)  # type: CanMessage
            message.add_data(D1, D2, D3, D4, D5, D6, D7, D8)
            message.add_ros_timestamp(ros_timestamp)
            msg_entry = can_messages.get(message_id)  # type: CanMessage
            if msg_entry is None:
                can_messages[message_id] = message
            else:
                if msg_entry.timestamp != message.timestamp:
                    can_messages[message_id] = message
            # NOTE(review): D1..D8 remain hex strings here, while put_mapping
            # declares them as longs -- confirm the intended types.
            can_message = {'@timestamp': message_timestamp, 'D1': D1, 'D2': D2, 'D3': D3, 'D4': D4,
                           'D5': D5, 'D6': D6, 'D7': D7, 'D8': D8, 'id': message_id,
                           'ros_timestamp': ros_timestamp}
            can_message_list.append(dict(can_message))
            # NOTE(review): this dumps/loads round trip recreates an
            # equivalent dict; it looks redundant.
            can_message = json.dumps(can_message)
            can_message = json.loads(can_message)
            elastic_json.append(op_dict)
            elastic_json.append(can_message)
            i = i + 1
        endbyte, p = extract_endbyte(p, hex_data)
    # Ensure the mapping exists before the bulk upload below.
    put_mapping()
    print endbyte
    time2 = time.time()
    print "time spent parsing log: ", time2 - time1
    post_message(can_message_list)
    print "number of messages logged:", len(can_message_list)
    post_bulk(elastic_json)
from pymongo import MongoClient
import time

# Shared connection to the local MongoDB instance.
client = MongoClient('localhost', 27017)  # type: MongoClient
db = client.auto
# Collection the parsed CAN messages are written to.
collection = db.can
# NOTE(review): 'posts' is never used in this module.
posts = db.posts
def post_message(post):
    """Insert a batch of CAN-message dicts into the 'can' collection.

    :param post: list of documents accepted by ``insert_many``.
    """
    print "posting to MongoDB..."
    start_time = time.time()
    # NOTE(review): the InsertManyResult is captured but never inspected.
    posts_id = collection.insert_many(post)
    finish_time = time.time()
    print "time spent posting: ", finish_time - start_time
def post_single_message(post):
    """Insert one document into the 'can' collection."""
    posts_id = collection.insert_one(post)
def close_mongo_connection():
    """Close the module-level MongoDB client."""
    print "closing mongo connection:"
    client.close()
#!/home/iseauto/PycharmProjects/filter/venv/bin/python2.7
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
#!/home/iseauto/PycharmProjects/filter/venv/bin/python2.7
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
#!/home/iseauto/PycharmProjects/filter/venv/bin/python2.7
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
python2.7
\ No newline at end of file
#!/home/iseauto/PycharmProjects/filter/venv/bin/python
import sys
import getopt
import sysconfig
valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags',
'ldflags', 'help']
if sys.version_info >= (3, 2):
valid_opts.insert(-1, 'extension-suffix')
valid_opts.append('abiflags')
if sys.version_info >= (3, 3):
valid_opts.append('configdir')
def exit_with_usage(code=1):
sys.stderr.write("Usage: {0} [{1}]\n".format(
sys.argv[0], '|'.join('--'+opt for opt in valid_opts)))
sys.exit(code)
try:
opts, args = getopt.getopt(sys.argv[1:], '', valid_opts)
except getopt.error:
exit_with_usage()
if not opts:
exit_with_usage()
pyver = sysconfig.get_config_var('VERSION')
getvar = sysconfig.get_config_var
opt_flags = [flag for (flag, val) in opts]
if '--help' in opt_flags:
exit_with_usage(code=0)
for opt in opt_flags:
if opt == '--prefix':
print(sysconfig.get_config_var('prefix'))
elif opt == '--exec-prefix':
print(sysconfig.get_config_var('exec_prefix'))
elif opt in ('--includes', '--cflags'):
flags = ['-I' + sysconfig.get_path('include'),
'-I' + sysconfig.get_path('platinclude')]
if opt == '--cflags':
flags.extend(getvar('CFLAGS').split())
print(' '.join(flags))
elif opt in ('--libs', '--ldflags'):
abiflags = getattr(sys, 'abiflags', '')
libs = ['-lpython' + pyver + abiflags]
libs += getvar('LIBS').split()
libs += getvar('SYSLIBS').split()
# add the prefix/lib/pythonX.Y/config dir, but only if there is no
# shared library in prefix/lib/.
if opt == '--ldflags':
if not getvar('Py_ENABLE_SHARED'):
libs.insert(0, '-L' + getvar('LIBPL'))
if not getvar('PYTHONFRAMEWORK'):
libs.extend(getvar('LINKFORSHARED').split())
print(' '.join(libs))
elif opt == '--extension-suffix':
ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
if ext_suffix is None:
ext_suffix = sysconfig.get_config_var('SO')
print(ext_suffix)
elif opt == '--abiflags':
if not getattr(sys, 'abiflags', None):
exit_with_usage()
print(sys.abiflags)
elif opt == '--configdir':
print(sysconfig.get_config_var('LIBPL'))
python2.7
\ No newline at end of file
from elasticsearch import Elasticsearch
#!/home/iseauto/PycharmProjects/filter/venv/bin/python2.7
# -*- coding: utf-8 -*-
import re
import sys
from wheel.tool import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
/usr/include/python2.7
\ No newline at end of file
/usr/lib/python2.7/UserDict.py
\ No newline at end of file
/usr/lib/python2.7/_abcoll.py
\ No newline at end of file
/usr/lib/python2.7/_weakrefset.py
\ No newline at end of file
/usr/lib/python2.7/abc.py
\ No newline at end of file
/usr/lib/python2.7/codecs.py
\ No newline at end of file
/usr/lib/python2.7/copy_reg.py
\ No newline at end of file
import os
import sys
import warnings
import imp
import opcode # opcode is not a virtualenv module, so we can use it to find the stdlib
# Important! To work on pypy, this must be a module that resides in the
# lib-python/modified-x.y.z directory
dirname = os.path.dirname
distutils_path = os.path.join(os.path.dirname(opcode.__file__), 'distutils')
if os.path.normpath(distutils_path) == os.path.dirname(os.path.normpath(__file__)):
warnings.warn(
"The virtualenv distutils package at %s appears to be in the same location as the system distutils?")
else:
__path__.insert(0, distutils_path)
real_distutils = imp.load_module("_virtualenv_distutils", None, distutils_path, ('', '', imp.PKG_DIRECTORY))
# Copy the relevant attributes
try:
__revision__ = real_distutils.__revision__
except AttributeError:
pass
__version__ = real_distutils.__version__
from distutils import dist, sysconfig
try:
basestring
except NameError:
basestring = str
## patch build_ext (distutils doesn't know how to get the libs directory
## path on windows - it hardcodes the paths around the patched sys.prefix)
if sys.platform == 'win32':
from distutils.command.build_ext import build_ext as old_build_ext
class build_ext(old_build_ext):
def finalize_options (self):
if self.library_dirs is None:
self.library_dirs = []
elif isinstance(self.library_dirs, basestring):
self.library_dirs = self.library_dirs.split(os.pathsep)
self.library_dirs.insert(0, os.path.join(sys.real_prefix, "Libs"))
old_build_ext.finalize_options(self)
from distutils.command import build_ext as build_ext_module
build_ext_module.build_ext = build_ext
## distutils.dist patches:
old_find_config_files = dist.Distribution.find_config_files
def find_config_files(self):
found = old_find_config_files(self)
system_distutils = os.path.join(distutils_path, 'distutils.cfg')
#if os.path.exists(system_distutils):
# found.insert(0, system_distutils)
# What to call the per-user config file
if os.name == 'posix':
user_filename = ".pydistutils.cfg"
else:
user_filename = "pydistutils.cfg"
user_filename = os.path.join(sys.prefix, user_filename)
if os.path.isfile(user_filename):
for item in list(found):
if item.endswith('pydistutils.cfg'):
found.remove(item)
found.append(user_filename)
return found
dist.Distribution.find_config_files = find_config_files
## distutils.sysconfig patches:
old_get_python_inc = sysconfig.get_python_inc
def sysconfig_get_python_inc(plat_specific=0, prefix=None):
if prefix is None:
prefix = sys.real_prefix
return old_get_python_inc(plat_specific, prefix)
sysconfig_get_python_inc.__doc__ = old_get_python_inc.__doc__
sysconfig.get_python_inc = sysconfig_get_python_inc
old_get_python_lib = sysconfig.get_python_lib
def sysconfig_get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
if standard_lib and prefix is None:
prefix = sys.real_prefix
return old_get_python_lib(plat_specific, standard_lib, prefix)
sysconfig_get_python_lib.__doc__ = old_get_python_lib.__doc__
sysconfig.get_python_lib = sysconfig_get_python_lib
old_get_config_vars = sysconfig.get_config_vars
def sysconfig_get_config_vars(*args):
real_vars = old_get_config_vars(*args)
if sys.platform == 'win32':
lib_dir = os.path.join(sys.real_prefix, "libs")
if isinstance(real_vars, dict) and 'LIBDIR' not in real_vars:
real_vars['LIBDIR'] = lib_dir # asked for all
elif isinstance(real_vars, list) and 'LIBDIR' in args:
real_vars = real_vars + [lib_dir] # asked for list
return real_vars
sysconfig_get_config_vars.__doc__ = old_get_config_vars.__doc__
sysconfig.get_config_vars = sysconfig_get_config_vars
# This is a config file local to this virtualenv installation
# You may include options that will be used by all distutils commands,
# and by easy_install. For instance:
#
# [easy_install]
# find_links = http://mylocalsite
/usr/lib/python2.7/encodings
\ No newline at end of file
/usr/lib/python2.7/fnmatch.py
\ No newline at end of file
/usr/lib/python2.7/genericpath.py
\ No newline at end of file
/usr/lib/python2.7/lib-dynload
\ No newline at end of file
/usr/lib/python2.7/linecache.py
\ No newline at end of file
/usr/lib/python2.7/locale.py
\ No newline at end of file
/usr/lib/python2.7/ntpath.py
\ No newline at end of file
/usr
\ No newline at end of file
/usr/lib/python2.7/os.py
\ No newline at end of file
/usr/lib/python2.7/posixpath.py
\ No newline at end of file
/usr/lib/python2.7/re.py
\ No newline at end of file
# Copyright 2009-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from uuid import UUID
from bson.py3compat import PY3
"""Tools for representing BSON binary data.
"""
BINARY_SUBTYPE = 0
"""BSON binary subtype for binary data.
This is the default subtype for binary data.
"""
FUNCTION_SUBTYPE = 1
"""BSON binary subtype for functions.
"""
OLD_BINARY_SUBTYPE = 2
"""Old BSON binary subtype for binary data.
This is the old default subtype, the current
default is :data:`BINARY_SUBTYPE`.
"""
OLD_UUID_SUBTYPE = 3
"""Old BSON binary subtype for a UUID.
:class:`uuid.UUID` instances will automatically be encoded
by :mod:`bson` using this subtype.
.. versionadded:: 2.1
"""
UUID_SUBTYPE = 4
"""BSON binary subtype for a UUID.
This is the new BSON binary subtype for UUIDs. The
current default is :data:`OLD_UUID_SUBTYPE` but will
change to this in a future release.
.. versionchanged:: 2.1
Changed to subtype 4.
"""
STANDARD = UUID_SUBTYPE
"""The standard UUID representation.
:class:`uuid.UUID` instances will automatically be encoded to
and decoded from BSON binary, using RFC-4122 byte order with
binary subtype :data:`UUID_SUBTYPE`.
.. versionadded:: 3.0
"""
PYTHON_LEGACY = OLD_UUID_SUBTYPE
"""The Python legacy UUID representation.
:class:`uuid.UUID` instances will automatically be encoded to
and decoded from BSON binary, using RFC-4122 byte order with
binary subtype :data:`OLD_UUID_SUBTYPE`.
.. versionadded:: 3.0
"""
JAVA_LEGACY = 5
"""The Java legacy UUID representation.
:class:`uuid.UUID` instances will automatically be encoded to
and decoded from BSON binary subtype :data:`OLD_UUID_SUBTYPE`,
using the Java driver's legacy byte order.
.. versionchanged:: 3.6
BSON binary subtype 4 is decoded using RFC-4122 byte order.
.. versionadded:: 2.3
"""
CSHARP_LEGACY = 6
"""The C#/.net legacy UUID representation.
:class:`uuid.UUID` instances will automatically be encoded to
and decoded from BSON binary subtype :data:`OLD_UUID_SUBTYPE`,
using the C# driver's legacy byte order.
.. versionchanged:: 3.6
BSON binary subtype 4 is decoded using RFC-4122 byte order.
.. versionadded:: 2.3
"""
ALL_UUID_SUBTYPES = (OLD_UUID_SUBTYPE, UUID_SUBTYPE)
ALL_UUID_REPRESENTATIONS = (STANDARD, PYTHON_LEGACY, JAVA_LEGACY, CSHARP_LEGACY)
UUID_REPRESENTATION_NAMES = {
PYTHON_LEGACY: 'PYTHON_LEGACY',
STANDARD: 'STANDARD',
JAVA_LEGACY: 'JAVA_LEGACY',
CSHARP_LEGACY: 'CSHARP_LEGACY'}
MD5_SUBTYPE = 5
"""BSON binary subtype for an MD5 hash.
"""
USER_DEFINED_SUBTYPE = 128
"""BSON binary subtype for any user defined structure.
"""
class Binary(bytes):
    """Representation of BSON binary data.

    Wrapping binary data lets the encoder distinguish data that should be
    stored as the BSON binary type from data that should be stored as a
    BSON string (Python strings encode to the string type).

    Raises TypeError if `data` is not an instance of :class:`str`
    (:class:`bytes` in python 3) or `subtype` is not an instance of
    :class:`int`. Raises ValueError if `subtype` is not in [0, 256).

    .. note::
      In python 3 instances of Binary with subtype 0 will be decoded
      directly to :class:`bytes`.

    :Parameters:
      - `data`: the binary data to represent
      - `subtype` (optional): the `binary subtype
        <http://bsonspec.org/#/specification>`_
        to use
    """
    _type_marker = 5

    def __new__(cls, data, subtype=BINARY_SUBTYPE):
        if not isinstance(data, bytes):
            raise TypeError("data must be an instance of bytes")
        if not isinstance(subtype, int):
            raise TypeError("subtype must be an instance of int")
        if not 0 <= subtype < 256:
            raise ValueError("subtype must be contained in [0, 256)")
        instance = bytes.__new__(cls, data)
        instance.__subtype = subtype
        return instance

    @property
    def subtype(self):
        """Subtype of this binary data.
        """
        return self.__subtype

    def __getnewargs__(self):
        # Work around http://bugs.python.org/issue7382
        data = super(Binary, self).__getnewargs__()[0]
        if PY3 and not isinstance(data, bytes):
            data = data.encode('latin-1')
        return data, self.__subtype

    def __eq__(self, other):
        if not isinstance(other, Binary):
            # Deliberately not NotImplemented: falling back to the bytes/str
            # comparison would make Binary("foo") == "foo" evaluate to True.
            return False
        return (self.__subtype, bytes(self)) == (other.subtype, bytes(other))

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        # Mix the subtype into the bytes hash so equal payloads with
        # different subtypes hash differently (matches __eq__).
        return super(Binary, self).__hash__() ^ hash(self.__subtype)

    def __repr__(self):
        return "Binary(%s, %s)" % (bytes.__repr__(self), self.__subtype)
class UUIDLegacy(Binary):
    """UUID wrapper to support working with UUIDs stored as PYTHON_LEGACY.

    .. doctest::
      >>> import uuid
      >>> from bson.binary import Binary, UUIDLegacy, STANDARD
      >>> from bson.codec_options import CodecOptions
      >>> my_uuid = uuid.uuid4()
      >>> coll = db.get_collection('test',
      ...                          CodecOptions(uuid_representation=STANDARD))
      >>> coll.insert_one({'uuid': Binary(my_uuid.bytes, 3)}).inserted_id
      ObjectId('...')
      >>> coll.find({'uuid': my_uuid}).count()
      0
      >>> coll.find({'uuid': UUIDLegacy(my_uuid)}).count()
      1
      >>> coll.find({'uuid': UUIDLegacy(my_uuid)})[0]['uuid']
      UUID('...')
      >>>
      >>> # Convert from subtype 3 to subtype 4
      >>> doc = coll.find_one({'uuid': UUIDLegacy(my_uuid)})
      >>> coll.replace_one({"_id": doc["_id"]}, doc).matched_count
      1
      >>> coll.find({'uuid': UUIDLegacy(my_uuid)}).count()
      0
      >>> coll.find({'uuid': {'$in': [UUIDLegacy(my_uuid), my_uuid]}}).count()
      1
      >>> coll.find_one({'uuid': my_uuid})['uuid']
      UUID('...')

    Raises TypeError if `obj` is not an instance of :class:`~uuid.UUID`.

    :Parameters:
      - `obj`: An instance of :class:`~uuid.UUID`.
    """

    def __new__(cls, obj):
        if not isinstance(obj, UUID):
            raise TypeError("obj must be an instance of uuid.UUID")
        # Store the raw UUID bytes as legacy subtype 3, but keep the
        # original UUID object around for round-tripping.
        instance = Binary.__new__(cls, obj.bytes, OLD_UUID_SUBTYPE)
        instance.__uuid = obj
        return instance

    def __getnewargs__(self):
        # Support copy and deepcopy
        return (self.__uuid,)

    @property
    def uuid(self):
        """UUID instance wrapped by this UUIDLegacy instance.
        """
        return self.__uuid

    def __repr__(self):
        return "UUIDLegacy('%s')" % self.__uuid
# Copyright 2009-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tools for representing JavaScript code in BSON.
"""
from bson.py3compat import abc, string_type, PY3, text_type
class Code(str):
    """BSON's JavaScript code type.

    Raises :class:`TypeError` if `code` is not an instance of
    :class:`basestring` (:class:`str` in python 3) or `scope`
    is not ``None`` or an instance of :class:`dict`.

    Scope variables can be set by passing a dictionary as the `scope`
    argument or by using keyword arguments. If a variable is set as a
    keyword argument it will override any setting for that variable in
    the `scope` dictionary.

    :Parameters:
      - `code`: A string containing JavaScript code to be evaluated or another
        instance of Code. In the latter case, the scope of `code` becomes this
        Code's :attr:`scope`.
      - `scope` (optional): dictionary representing the scope in which
        `code` should be evaluated - a mapping from identifiers (as
        strings) to values. Defaults to ``None``. This is applied after any
        scope associated with a given `code` above.
      - `**kwargs` (optional): scope variables can also be passed as
        keyword arguments. These are applied after `scope` and `code`.

    .. versionchanged:: 3.4
      The default value for :attr:`scope` is ``None`` instead of ``{}``.
    """
    _type_marker = 13

    def __new__(cls, code, scope=None, **kwargs):
        if not isinstance(code, string_type):
            raise TypeError("code must be an "
                            "instance of %s" % (string_type.__name__))
        if not PY3 and isinstance(code, text_type):
            self = str.__new__(cls, code.encode('utf8'))
        else:
            self = str.__new__(cls, code)
        # Inherit the scope from an existing Code instance, if any.
        try:
            self.__scope = code.scope
        except AttributeError:
            self.__scope = None
        if scope is not None:
            if not isinstance(scope, abc.Mapping):
                raise TypeError("scope must be an instance of dict")
            if self.__scope is not None:
                # BUG FIX: copy before merging. Previously this updated the
                # inherited mapping in place, silently mutating the scope of
                # the Code instance `code` was constructed from. The merged
                # scope is a plain dict.
                self.__scope = dict(self.__scope)
                self.__scope.update(scope)
            else:
                self.__scope = scope
        if kwargs:
            if self.__scope is not None:
                # Same reasoning: self.__scope may still alias the caller's
                # `scope` mapping (or an inherited scope), so copy first.
                self.__scope = dict(self.__scope)
                self.__scope.update(kwargs)
            else:
                self.__scope = kwargs
        return self

    @property
    def scope(self):
        """Scope dictionary for this instance or ``None``.
        """
        return self.__scope

    def __repr__(self):
        return "Code(%s, %r)" % (str.__repr__(self), self.__scope)

    def __eq__(self, other):
        if isinstance(other, Code):
            return (self.__scope, str(self)) == (other.__scope, str(other))
        return False

    # Code holds a mutable scope dict, so instances are unhashable.
    __hash__ = None

    def __ne__(self, other):
        return not self == other
# Copyright 2014-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tools for specifying BSON codec options."""
import datetime
from collections import namedtuple
from bson.py3compat import abc, string_type
from bson.binary import (ALL_UUID_REPRESENTATIONS,
PYTHON_LEGACY,
UUID_REPRESENTATION_NAMES)
_RAW_BSON_DOCUMENT_MARKER = 101
def _raw_document_class(document_class):
"""Determine if a document_class is a RawBSONDocument class."""
marker = getattr(document_class, '_type_marker', None)
return marker == _RAW_BSON_DOCUMENT_MARKER
_options_base = namedtuple(
'CodecOptions',
('document_class', 'tz_aware', 'uuid_representation',
'unicode_decode_error_handler', 'tzinfo'))
class CodecOptions(_options_base):
    """Encapsulates options used encoding and / or decoding BSON.

    The `document_class` option is used to define a custom type for use
    decoding BSON documents. Access to the underlying raw BSON bytes for
    a document is available using the :class:`~bson.raw_bson.RawBSONDocument`
    type::

      >>> from bson.raw_bson import RawBSONDocument
      >>> from bson.codec_options import CodecOptions
      >>> codec_options = CodecOptions(document_class=RawBSONDocument)
      >>> coll = db.get_collection('test', codec_options=codec_options)
      >>> doc = coll.find_one()
      >>> doc.raw
      '\\x16\\x00\\x00\\x00\\x07_id\\x00[0\\x165\\x91\\x10\\xea\\x14\\xe8\\xc5\\x8b\\x93\\x00'

    The document class can be any type that inherits from
    :class:`~collections.MutableMapping`::

      >>> class AttributeDict(dict):
      ...     # A dict that supports attribute access.
      ...     def __getattr__(self, key):
      ...         return self[key]
      ...     def __setattr__(self, key, value):
      ...         self[key] = value
      ...
      >>> codec_options = CodecOptions(document_class=AttributeDict)
      >>> coll = db.get_collection('test', codec_options=codec_options)
      >>> doc = coll.find_one()
      >>> doc._id
      ObjectId('5b3016359110ea14e8c58b93')

    See :doc:`/examples/datetimes` for examples using the `tz_aware` and
    `tzinfo` options.

    See :class:`~bson.binary.UUIDLegacy` for examples using the
    `uuid_representation` option.

    :Parameters:
      - `document_class`: BSON documents returned in queries will be decoded
        to an instance of this class. Must be a subclass of
        :class:`~collections.MutableMapping`. Defaults to :class:`dict`.
      - `tz_aware`: If ``True``, BSON datetimes will be decoded to timezone
        aware instances of :class:`~datetime.datetime`. Otherwise they will be
        naive. Defaults to ``False``.
      - `uuid_representation`: The BSON representation to use when encoding
        and decoding instances of :class:`~uuid.UUID`. Defaults to
        :data:`~bson.binary.PYTHON_LEGACY`.
      - `unicode_decode_error_handler`: The error handler to apply when
        a Unicode-related error occurs during BSON decoding that would
        otherwise raise :exc:`UnicodeDecodeError`. Valid options include
        'strict', 'replace', and 'ignore'. Defaults to 'strict'.
      - `tzinfo`: A :class:`~datetime.tzinfo` subclass that specifies the
        timezone to/from which :class:`~datetime.datetime` objects should be
        encoded/decoded.

    .. warning:: Care must be taken when changing
       `unicode_decode_error_handler` from its default value ('strict').
       The 'replace' and 'ignore' modes should not be used when documents
       retrieved from the server will be modified in the client application
       and stored back to the server.
    """

    def __new__(cls, document_class=dict,
                tz_aware=False, uuid_representation=PYTHON_LEGACY,
                unicode_decode_error_handler="strict",
                tzinfo=None):
        if not (issubclass(document_class, abc.MutableMapping) or
                _raw_document_class(document_class)):
            raise TypeError("document_class must be dict, bson.son.SON, "
                            "bson.raw_bson.RawBSONDocument, or a "
                            "subclass of collections.MutableMapping")
        if not isinstance(tz_aware, bool):
            raise TypeError("tz_aware must be True or False")
        if uuid_representation not in ALL_UUID_REPRESENTATIONS:
            raise ValueError("uuid_representation must be a value "
                             "from bson.binary.ALL_UUID_REPRESENTATIONS")
        # BUG FIX: the previous check was
        # isinstance(unicode_decode_error_handler, (string_type, None)),
        # which raises TypeError ("isinstance() arg 2 must be a type or
        # tuple of types") for ANY non-string handler, because None is not
        # a type. Spell the "string or None" check out explicitly so an
        # invalid handler raises the documented ValueError.
        if not (unicode_decode_error_handler is None or
                isinstance(unicode_decode_error_handler, string_type)):
            raise ValueError("unicode_decode_error_handler must be a string "
                             "or None")
        if tzinfo is not None:
            if not isinstance(tzinfo, datetime.tzinfo):
                raise TypeError(
                    "tzinfo must be an instance of datetime.tzinfo")
            if not tz_aware:
                raise ValueError(
                    "cannot specify tzinfo without also setting tz_aware=True")
        return tuple.__new__(
            cls, (document_class, tz_aware, uuid_representation,
                  unicode_decode_error_handler, tzinfo))

    def _arguments_repr(self):
        """Representation of the arguments used to create this object."""
        document_class_repr = (
            'dict' if self.document_class is dict
            else repr(self.document_class))
        uuid_rep_repr = UUID_REPRESENTATION_NAMES.get(self.uuid_representation,
                                                      self.uuid_representation)
        return ('document_class=%s, tz_aware=%r, uuid_representation='
                '%s, unicode_decode_error_handler=%r, tzinfo=%r' %
                (document_class_repr, self.tz_aware, uuid_rep_repr,
                 self.unicode_decode_error_handler, self.tzinfo))

    def __repr__(self):
        return '%s(%s)' % (self.__class__.__name__, self._arguments_repr())

    def with_options(self, **kwargs):
        """Make a copy of this CodecOptions, overriding some options::

            >>> from bson.codec_options import DEFAULT_CODEC_OPTIONS
            >>> DEFAULT_CODEC_OPTIONS.tz_aware
            False
            >>> options = DEFAULT_CODEC_OPTIONS.with_options(tz_aware=True)
            >>> options.tz_aware
            True

        .. versionadded:: 3.5
        """
        return CodecOptions(
            kwargs.get('document_class', self.document_class),
            kwargs.get('tz_aware', self.tz_aware),
            kwargs.get('uuid_representation', self.uuid_representation),
            kwargs.get('unicode_decode_error_handler',
                       self.unicode_decode_error_handler),
            kwargs.get('tzinfo', self.tzinfo))
DEFAULT_CODEC_OPTIONS = CodecOptions()


def _parse_codec_options(options):
    """Parse BSON codec options."""
    defaults = DEFAULT_CODEC_OPTIONS
    return CodecOptions(
        document_class=options.get('document_class', defaults.document_class),
        tz_aware=options.get('tz_aware', defaults.tz_aware),
        # The URI/kwargs option is spelled without an underscore.
        uuid_representation=options.get('uuidrepresentation',
                                        defaults.uuid_representation),
        unicode_decode_error_handler=options.get(
            'unicode_decode_error_handler',
            defaults.unicode_decode_error_handler),
        tzinfo=options.get('tzinfo', defaults.tzinfo))
# Copyright 2009-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tools for manipulating DBRefs (references to MongoDB documents)."""
from copy import deepcopy
from bson.py3compat import iteritems, string_type
from bson.son import SON
class DBRef(object):
    """A reference to a document stored in MongoDB.
    """
    # DBRef isn't actually a BSON "type" so this number was arbitrarily chosen.
    _type_marker = 100

    def __init__(self, collection, id, database=None, _extra={}, **kwargs):
        """Initialize a new :class:`DBRef`.

        Raises :class:`TypeError` if `collection` or `database` is not
        an instance of :class:`basestring` (:class:`str` in python 3).
        `database` is optional and allows references to documents to work
        across databases. Any additional keyword arguments will create
        additional fields in the resultant embedded document.

        :Parameters:
          - `collection`: name of the collection the document is stored in
          - `id`: the value of the document's ``"_id"`` field
          - `database` (optional): name of the database to reference
          - `**kwargs` (optional): additional keyword arguments will
            create additional, custom fields

        .. mongodoc:: dbrefs
        """
        if not isinstance(collection, string_type):
            raise TypeError("collection must be an "
                            "instance of %s" % string_type.__name__)
        if database is not None and not isinstance(database, string_type):
            raise TypeError("database must be an "
                            "instance of %s" % string_type.__name__)
        self.__collection = collection
        self.__id = id
        self.__database = database
        # Entries from _extra win over identically-named keyword arguments.
        # _extra is only read here, never mutated, so the shared default
        # dict is safe.
        kwargs.update(_extra)
        self.__kwargs = kwargs

    @property
    def collection(self):
        """Get the name of this DBRef's collection as unicode.
        """
        return self.__collection

    @property
    def id(self):
        """Get this DBRef's _id.
        """
        return self.__id

    @property
    def database(self):
        """Get the name of this DBRef's database.

        Returns None if this DBRef doesn't specify a database.
        """
        return self.__database

    def __getattr__(self, key):
        # Extra fields are reachable as attributes.
        try:
            return self.__kwargs[key]
        except KeyError:
            raise AttributeError(key)

    def __setstate__(self, state):
        # Have to provide __setstate__ to avoid infinite recursion
        # while unpickling, since we override __getattr__.
        self.__dict__.update(state)

    def as_doc(self):
        """Get the SON document representation of this DBRef.

        Generally not needed by application developers
        """
        doc = SON([("$ref", self.collection), ("$id", self.id)])
        if self.database is not None:
            doc["$db"] = self.database
        doc.update(self.__kwargs)
        return doc

    def __repr__(self):
        extra = "".join(", %s=%r" % pair
                        for pair in iteritems(self.__kwargs))
        if self.database is None:
            return "DBRef(%r, %r%s)" % (self.collection, self.id, extra)
        return "DBRef(%r, %r, %r%s)" % (self.collection, self.id,
                                        self.database, extra)

    def __eq__(self, other):
        if not isinstance(other, DBRef):
            return NotImplemented
        us = (self.__database, self.__collection, self.__id, self.__kwargs)
        them = (other.__database, other.__collection,
                other.__id, other.__kwargs)
        return us == them

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        """Get a hash value for this :class:`DBRef`."""
        return hash((self.__collection, self.__id, self.__database,
                     tuple(sorted(self.__kwargs.items()))))

    def __deepcopy__(self, memo):
        """Support function for `copy.deepcopy()`."""
        return DBRef(deepcopy(self.__collection, memo),
                     deepcopy(self.__id, memo),
                     deepcopy(self.__database, memo),
                     deepcopy(self.__kwargs, memo))
# Copyright 2009-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Exceptions raised by the BSON package."""
# Exception hierarchy for the BSON package: everything derives from
# BSONError so callers can catch all BSON problems with one except clause.
class BSONError(Exception):
    """Base class for all BSON exceptions.
    """


class InvalidBSON(BSONError):
    """Raised when trying to create a BSON object from invalid data.
    """


class InvalidStringData(BSONError):
    """Raised when trying to encode a string containing non-UTF8 data.
    """


class InvalidDocument(BSONError):
    """Raised when trying to create a BSON object from an invalid document.
    """


class InvalidId(BSONError):
    """Raised when trying to create an ObjectId from invalid data.
    """
# Copyright 2014-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A BSON wrapper for long (int in python3)"""
from bson.py3compat import PY3
if PY3:
    # Python 3 has no separate `long` type; alias it so the class
    # definition below works on both major versions.
    long = int


class Int64(long):
    """Representation of the BSON int64 type.

    This is necessary because every integral number is an :class:`int` in
    Python 3. Small integral numbers are encoded to BSON int32 by default,
    but Int64 numbers will always be encoded to BSON int64.

    :Parameters:
      - `value`: the numeric value to represent
    """
    # BSON type byte for int64; the encoder dispatches on this marker.
    _type_marker = 18
# Copyright 2010-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Representation for the MongoDB internal MaxKey type.
"""
class MaxKey(object):
    """MongoDB internal MaxKey type.

    .. versionchanged:: 2.7
       ``MaxKey`` now implements comparison operators.
    """
    _type_marker = 127

    def __eq__(self, other):
        # All MaxKey instances are interchangeable.
        return isinstance(other, MaxKey)

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        return hash(self._type_marker)

    def __lt__(self, dummy):
        # MaxKey sorts after every value, so it is never less-than.
        return False

    def __le__(self, other):
        # <= holds only against another MaxKey (the equality case).
        return isinstance(other, MaxKey)

    def __gt__(self, other):
        # Strictly greater than anything that is not itself a MaxKey.
        return not isinstance(other, MaxKey)

    def __ge__(self, dummy):
        return True

    def __repr__(self):
        return "MaxKey()"
# Copyright 2010-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Representation for the MongoDB internal MinKey type.
"""
class MinKey(object):
    """MongoDB internal MinKey type.

    .. versionchanged:: 2.7
       ``MinKey`` now implements comparison operators.
    """
    _type_marker = 255

    def __eq__(self, other):
        # All MinKey instances are interchangeable.
        return isinstance(other, MinKey)

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        return hash(self._type_marker)

    def __lt__(self, other):
        # Strictly less than anything that is not itself a MinKey.
        return not isinstance(other, MinKey)

    def __le__(self, dummy):
        return True

    def __gt__(self, dummy):
        # MinKey sorts before every value, so it is never greater-than.
        return False

    def __ge__(self, other):
        # >= holds only against another MinKey (the equality case).
        return isinstance(other, MinKey)

    def __repr__(self):
        return "MinKey()"
# Copyright 2009-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tools for working with MongoDB `ObjectIds
<http://dochub.mongodb.org/core/objectids>`_.
"""
import binascii
import calendar
import datetime
import os
import random
import socket
import struct
import threading
import time
from bson.errors import InvalidId
from bson.py3compat import PY3, bytes_from_hex, string_type, text_type
from bson.tz_util import utc
if PY3:
    # Iterating over bytes yields ints on python 3, so no conversion needed.
    _ord = lambda x: x
else:
    # On python 2 iterating over a str yields 1-char strings; convert.
    _ord = ord
# http://isthe.com/chongo/tech/comp/fnv/index.html#FNV-1a
def _fnv_1a_24(data, _ord=_ord):
    """FNV-1a 24 bit hash"""
    # http://www.isthe.com/chongo/tech/comp/fnv/index.html#xor-fold
    # Compute FNV-1a 32 bit first, then xor-fold down to 24 bits.
    modulus = 2 ** 32
    prime = 16777619  # 32-bit FNV prime
    acc = 2166136261  # 32-bit FNV-1 offset basis
    for byte in data:
        acc = ((acc ^ _ord(byte)) * prime) % modulus
    # xor-fold the result to 24 bit.
    return (acc >> 24) ^ (acc & 0xffffff)
def _machine_bytes():
    """Get the machine portion of an ObjectId.
    """
    # gethostname() returns a unicode string in python 3.x.
    # _fnv_1a_24 yields a 24 bit integer; pack it little-endian and drop
    # the high (padding) byte to keep exactly 3 bytes.
    hostname_hash = _fnv_1a_24(socket.gethostname().encode())
    return struct.pack("<I", hostname_hash)[:3]
def _raise_invalid_id(oid):
    # Shared helper so every invalid-oid path raises the same message.
    message = ("%r is not a valid ObjectId, it must be a 12-byte input"
               " or a 24-character hex string" % oid)
    raise InvalidId(message)
class ObjectId(object):
    """A MongoDB ObjectId.
    """

    # Process-wide 3-byte counter, seeded randomly so concurrently started
    # processes are unlikely to begin at the same value.
    _inc = random.randint(0, 0xFFFFFF)
    # Guards _inc; generation must be safe across threads.
    _inc_lock = threading.Lock()

    # 3-byte hostname hash, computed once at import time.
    _machine_bytes = _machine_bytes()

    # A bare string is a valid __slots__ declaring one slot; '__id' is
    # name-mangled to '_ObjectId__id' like the attribute accesses below.
    __slots__ = ('__id')

    _type_marker = 7

    def __init__(self, oid=None):
        """Initialize a new ObjectId.

        An ObjectId is a 12-byte unique identifier consisting of:

          - a 4-byte value representing the seconds since the Unix epoch,
          - a 3-byte machine identifier,
          - a 2-byte process id, and
          - a 3-byte counter, starting with a random value.

        By default, ``ObjectId()`` creates a new unique identifier. The
        optional parameter `oid` can be an :class:`ObjectId`, or any 12
        :class:`bytes` or, in Python 2, any 12-character :class:`str`.

        For example, the 12 bytes b'foo-bar-quux' do not follow the ObjectId
        specification but they are acceptable input::

          >>> ObjectId(b'foo-bar-quux')
          ObjectId('666f6f2d6261722d71757578')

        `oid` can also be a :class:`unicode` or :class:`str` of 24 hex digits::

          >>> ObjectId('0123456789ab0123456789ab')
          ObjectId('0123456789ab0123456789ab')
          >>>
          >>> # A u-prefixed unicode literal:
          >>> ObjectId(u'0123456789ab0123456789ab')
          ObjectId('0123456789ab0123456789ab')

        Raises :class:`~bson.errors.InvalidId` if `oid` is not 12 bytes nor
        24 hex digits, or :class:`TypeError` if `oid` is not an accepted type.

        :Parameters:
          - `oid` (optional): a valid ObjectId.

        .. mongodoc:: objectids
        """
        if oid is None:
            self.__generate()
        elif isinstance(oid, bytes) and len(oid) == 12:
            # Any 12 bytes are accepted verbatim, valid layout or not.
            self.__id = oid
        else:
            self.__validate(oid)

    @classmethod
    def from_datetime(cls, generation_time):
        """Create a dummy ObjectId instance with a specific generation time.

        This method is useful for doing range queries on a field
        containing :class:`ObjectId` instances.

        .. warning::
           It is not safe to insert a document containing an ObjectId
           generated using this method. This method deliberately
           eliminates the uniqueness guarantee that ObjectIds
           generally provide. ObjectIds generated with this method
           should be used exclusively in queries.

        `generation_time` will be converted to UTC. Naive datetime
        instances will be treated as though they already contain UTC.

        An example using this helper to get documents where ``"_id"``
        was generated before January 1, 2010 would be:

        >>> gen_time = datetime.datetime(2010, 1, 1)
        >>> dummy_id = ObjectId.from_datetime(gen_time)
        >>> result = collection.find({"_id": {"$lt": dummy_id}})

        :Parameters:
          - `generation_time`: :class:`~datetime.datetime` to be used
            as the generation time for the resulting ObjectId.
        """
        # Normalize aware datetimes to UTC; naive ones are assumed UTC.
        if generation_time.utcoffset() is not None:
            generation_time = generation_time - generation_time.utcoffset()
        timestamp = calendar.timegm(generation_time.timetuple())
        # Timestamp plus 8 zero bytes: machine/pid/counter deliberately blank.
        oid = struct.pack(
            ">i", int(timestamp)) + b"\x00\x00\x00\x00\x00\x00\x00\x00"
        return cls(oid)

    @classmethod
    def is_valid(cls, oid):
        """Checks if a `oid` string is valid or not.

        :Parameters:
          - `oid`: the object id to validate

        .. versionadded:: 2.3
        """
        if not oid:
            return False
        try:
            ObjectId(oid)
            return True
        except (InvalidId, TypeError):
            return False

    def __generate(self):
        """Generate a new value for this ObjectId.
        """
        # 4 bytes current time
        oid = struct.pack(">i", int(time.time()))
        # 3 bytes machine
        oid += ObjectId._machine_bytes
        # 2 bytes pid
        # NOTE(review): % 0xFFFF (not 0x10000) maps pid 65535 onto 0, so two
        # distinct pids can share a value — confirm this matches the spec.
        oid += struct.pack(">H", os.getpid() % 0xFFFF)
        # 3 bytes inc -- take the low 3 bytes of the packed 4-byte counter.
        with ObjectId._inc_lock:
            oid += struct.pack(">i", ObjectId._inc)[1:4]
            ObjectId._inc = (ObjectId._inc + 1) % 0xFFFFFF
        self.__id = oid

    def __validate(self, oid):
        """Validate and use the given id for this ObjectId.

        Raises TypeError if id is not an instance of
        (:class:`basestring` (:class:`str` or :class:`bytes`
        in python 3), ObjectId) and InvalidId if it is not a
        valid ObjectId.

        :Parameters:
          - `oid`: a valid ObjectId
        """
        if isinstance(oid, ObjectId):
            self.__id = oid.binary
        # bytes or unicode in python 2, str in python 3
        elif isinstance(oid, string_type):
            if len(oid) == 24:
                try:
                    self.__id = bytes_from_hex(oid)
                except (TypeError, ValueError):
                    _raise_invalid_id(oid)
            else:
                _raise_invalid_id(oid)
        else:
            raise TypeError("id must be an instance of (bytes, %s, ObjectId), "
                            "not %s" % (text_type.__name__, type(oid)))

    @property
    def binary(self):
        """12-byte binary representation of this ObjectId.
        """
        return self.__id

    @property
    def generation_time(self):
        """A :class:`datetime.datetime` instance representing the time of
        generation for this :class:`ObjectId`.

        The :class:`datetime.datetime` is timezone aware, and
        represents the generation time in UTC. It is precise to the
        second.
        """
        # First 4 bytes are the big-endian timestamp.
        timestamp = struct.unpack(">i", self.__id[0:4])[0]
        return datetime.datetime.fromtimestamp(timestamp, utc)

    def __getstate__(self):
        """return value of object for pickling.
        needed explicitly because __slots__() defined.
        """
        return self.__id

    def __setstate__(self, value):
        """explicit state set from pickling
        """
        # Provide backwards compatability with OIDs
        # pickled with pymongo-1.9 or older.
        if isinstance(value, dict):
            oid = value["_ObjectId__id"]
        else:
            oid = value
        # ObjectIds pickled in python 2.x used `str` for __id.
        # In python 3.x this has to be converted to `bytes`
        # by encoding latin-1.
        if PY3 and isinstance(oid, text_type):
            self.__id = oid.encode('latin-1')
        else:
            self.__id = oid

    def __str__(self):
        # 24-character lowercase hex string.
        if PY3:
            return binascii.hexlify(self.__id).decode()
        return binascii.hexlify(self.__id)

    def __repr__(self):
        return "ObjectId('%s')" % (str(self),)

    def __eq__(self, other):
        if isinstance(other, ObjectId):
            return self.__id == other.binary
        return NotImplemented

    def __ne__(self, other):
        if isinstance(other, ObjectId):
            return self.__id != other.binary
        return NotImplemented

    def __lt__(self, other):
        if isinstance(other, ObjectId):
            return self.__id < other.binary
        return NotImplemented

    def __le__(self, other):
        if isinstance(other, ObjectId):
            return self.__id <= other.binary
        return NotImplemented

    def __gt__(self, other):
        if isinstance(other, ObjectId):
            return self.__id > other.binary
        return NotImplemented

    def __ge__(self, other):
        if isinstance(other, ObjectId):
            return self.__id >= other.binary
        return NotImplemented

    def __hash__(self):
        """Get a hash value for this :class:`ObjectId`."""
        return hash(self.__id)
# Copyright 2009-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you
# may not use this file except in compliance with the License. You
# may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
"""Utility functions and definitions for python3 compatibility."""
import sys
PY3 = sys.version_info[0] == 3
if PY3:
import codecs
import _thread as thread
from io import BytesIO as StringIO
try:
import collections.abc as abc
except ImportError:
# PyPy3 (based on CPython 3.2)
import collections as abc
MAXSIZE = sys.maxsize
imap = map
def b(s):
# BSON and socket operations deal in binary data. In
# python 3 that means instances of `bytes`. In python
# 2.6 and 2.7 you can create an alias for `bytes` using
# the b prefix (e.g. b'foo').
# See http://python3porting.com/problems.html#nicer-solutions
return codecs.latin_1_encode(s)[0]
def bytes_from_hex(h):
return bytes.fromhex(h)
def iteritems(d):
return iter(d.items())
def itervalues(d):
return iter(d.values())
def reraise(exctype, value, trace=None):
raise exctype(str(value)).with_traceback(trace)
def reraise_instance(exc_instance, trace=None):
raise exc_instance.with_traceback(trace)
def _unicode(s):
return s
text_type = str
string_type = str
integer_types = int
else:
import collections as abc
import thread
from itertools import imap
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
MAXSIZE = sys.maxint
def b(s):
# See comments above. In python 2.x b('foo') is just 'foo'.
return s
def bytes_from_hex(h):
return h.decode('hex')
def iteritems(d):
return d.iteritems()
def itervalues(d):
return d.itervalues()
def reraise(exctype, value, trace=None):
_reraise(exctype, str(value), trace)
def reraise_instance(exc_instance, trace=None):
_reraise(exc_instance, None, trace)
# "raise x, y, z" raises SyntaxError in Python 3
exec("""def _reraise(exc, value, trace):
raise exc, value, trace
""")
_unicode = unicode
string_type = basestring
text_type = unicode
integer_types = (int, long)
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment