[lttng-dev] [babeltrace PATCH] Babeltrace python module v3
Mathieu Desnoyers
mathieu.desnoyers at efficios.com
Fri Aug 10 17:26:22 EDT 2012
* Danny Serres (danny.serres at efficios.com) wrote:
> The Babeltrace Python module can be used to directly control
> the Babeltrace API inside Python, using 'import babeltrace'.
>
> Therefore, it becomes possible to create a Context, add a
> trace to it, iterate on it, read events and so on from
> within Python.
>
> SWIG >= 2.0 is used to create the wrapper and its
> 'warning md variable unused' bug is patched in Makefile.am
>
> In the interface file, struct and enum are directly copied
> from the include files. All changes to struct bt_iter_pos
> and to enums in ctf/events.h and clock-types.h must also
> be made in the interface file.
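(For reference, the workflow described in the commit message above amounts to
roughly the following minimal sketch, adapted from the example scripts shipped
with this patch; the trace path is only a placeholder and error handling is
kept to a minimum.)

import babeltrace

# Open a CTF trace (placeholder path) and add it to a new context.
ctx = babeltrace.Context()
trace_handle = ctx.add_trace("/path/to/trace/kernel", "ctf")
if trace_handle is None:
    raise IOError("Error adding trace")

# Iterate on every event of the trace collection, from the beginning.
begin_pos = babeltrace.IterPos(babeltrace.SEEK_BEGIN)
ctf_it = babeltrace.ctf.Iterator(ctx, begin_pos)

event = ctf_it.read_event()
while event is not None:
    print("TS: {} : {}".format(event.get_timestamp(), event.get_name()))
    if ctf_it.next() < 0:
        break
    event = ctf_it.read_event()

del ctf_it
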
Please change the .gitignore:

*.m4

to

/m4/libtool.m4
/m4/lt~obsolete.m4
/m4/ltoptions.m4
/m4/ltsugar.m4
/m4/ltversion.m4

otherwise git add complains (rightfully).
Also, please add a license header at the beginning of each file. Please
use a wording similar to:
/*
* filename.ext
*
* Babeltrace somescriptname
*
* Copyright 2012 EfficiOS Inc.
*
* Author: Danny Serres <danny.serres at efficios.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*/
In Python, you might need to use:
# filename.ext
#
# Babeltrace somescriptname
#
# Copyright 2012 EfficiOS Inc.
#
# Author: Danny Serres <danny.serres at efficios.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
The rest looks good,
Thanks!
Mathieu
>
> Signed-off-by: Danny Serres <danny.serres at efficios.com>
> Signed-off-by: Yannick Brosseau <yannick.brosseau at gmail.com>
> ---
> .gitignore | 4 +
> Makefile.am | 2 +-
> README | 6 +
> bindings/Makefile.am | 3 +
> bindings/python/Makefile.am | 28 +
> bindings/python/babeltrace.i.in | 1079 +++++++++
> bindings/python/examples/babeltrace_and_lttng.py | 107 +
> bindings/python/examples/eventcount.py | 66 +
> bindings/python/examples/eventcountlist.py | 65 +
> bindings/python/examples/events_per_cpu.py | 81 +
> bindings/python/examples/example-api-test.py | 59 +
> bindings/python/examples/histogram.py | 121 +
> .../examples/output_format_modules/cairoplot.py | 2336 ++++++++++++++++++++
> .../examples/output_format_modules/pprint_table.py | 35 +
> .../examples/output_format_modules/series.py | 1140 ++++++++++
> bindings/python/examples/sched_switch.py | 110 +
> bindings/python/examples/softirqtimes.py | 130 ++
> bindings/python/examples/syscalls_by_pid.py | 61 +
> bindings/python/python-complements.c | 105 +
> bindings/python/python-complements.h | 36 +
> bootstrap | 2 +-
> configure.ac | 37 +
> doc/python-howto.txt | 70 +
> m4/ax_pkg_swig.m4 | 135 ++
> tests/tests-python.py | 115 +
> 25 files changed, 5931 insertions(+), 2 deletions(-)
> create mode 100644 bindings/Makefile.am
> create mode 100644 bindings/python/Makefile.am
> create mode 100644 bindings/python/babeltrace.i.in
> create mode 100644 bindings/python/examples/babeltrace_and_lttng.py
> create mode 100644 bindings/python/examples/eventcount.py
> create mode 100644 bindings/python/examples/eventcountlist.py
> create mode 100644 bindings/python/examples/events_per_cpu.py
> create mode 100644 bindings/python/examples/example-api-test.py
> create mode 100644 bindings/python/examples/histogram.py
> create mode 100644 bindings/python/examples/output_format_modules/__init__.py
> create mode 100755 bindings/python/examples/output_format_modules/cairoplot.py
> create mode 100644 bindings/python/examples/output_format_modules/pprint_table.py
> create mode 100755 bindings/python/examples/output_format_modules/series.py
> create mode 100644 bindings/python/examples/sched_switch.py
> create mode 100644 bindings/python/examples/softirqtimes.py
> create mode 100644 bindings/python/examples/syscalls_by_pid.py
> create mode 100644 bindings/python/python-complements.c
> create mode 100644 bindings/python/python-complements.h
> create mode 100644 doc/python-howto.txt
> create mode 100644 m4/ax_pkg_swig.m4
> create mode 100644 tests/tests-python.py
>
> diff --git a/.gitignore b/.gitignore
> index d6098ac..3fe60e7 100644
> --- a/.gitignore
> +++ b/.gitignore
> @@ -1,4 +1,5 @@
> /tests/test-bitfield
> +*~
> *.o
> *.a
> *.la
> @@ -26,3 +27,6 @@ converter/babeltrace-log
> core
> formats/ctf/metadata/ctf-parser.output
> stamp-h1
> +bindings/python/babeltrace.i
> +bindings/python/babeltrace.py
> +bindings/python/babeltrace_wrap.c
> diff --git a/Makefile.am b/Makefile.am
> index 308ee16..6584c5d 100644
> --- a/Makefile.am
> +++ b/Makefile.am
> @@ -2,7 +2,7 @@ AM_CFLAGS = $(PACKAGE_CFLAGS) -I$(top_srcdir)/include
>
> ACLOCAL_AMFLAGS = -I m4
>
> -SUBDIRS = include types lib formats converter tests doc
> +SUBDIRS = include types lib formats converter bindings tests doc
>
> dist_doc_DATA = ChangeLog LICENSE mit-license.txt gpl-2.0.txt \
> std-ext-lib.txt
> diff --git a/README b/README
> index 75bf0cf..1687075 100644
> --- a/README
> +++ b/README
> @@ -25,6 +25,7 @@ BUILDING
> make install
> ldconfig
>
> + If you do not want Python bindings, run ./configure --disable-python.
>
> DEPENDENCIES
> ------------
> @@ -44,6 +45,11 @@ To compile Babeltrace, you will need:
> libpopt >= 1.13 development libraries
> (Debian : libpopt-dev)
> (Fedora : popt)
> + python headers (optional)
> + (Debian/Ubuntu : python-dev)
> + swig >= 2.0 (optional)
> + (Debian/Ubuntu : swig2.0)
> +
>
> For developers using the git tree:
>
> diff --git a/bindings/Makefile.am b/bindings/Makefile.am
> new file mode 100644
> index 0000000..dcd868d
> --- /dev/null
> +++ b/bindings/Makefile.am
> @@ -0,0 +1,3 @@
> +if USE_PYTHON
> +SUBDIRS = python
> +endif
> diff --git a/bindings/python/Makefile.am b/bindings/python/Makefile.am
> new file mode 100644
> index 0000000..579759f
> --- /dev/null
> +++ b/bindings/python/Makefile.am
> @@ -0,0 +1,28 @@
> +babeltrace.i: babeltrace.i.in
> + sed "s/BABELTRACE_VERSION_STR/Babeltrace $(PACKAGE_VERSION)/g" <babeltrace.i.in >babeltrace.i
> +
> +AM_CFLAGS = -I$(PYTHON_INCLUDE) -I$(top_srcdir)/include/
> +
> +EXTRA_DIST = babeltrace.i
> +python_PYTHON = babeltrace.py
> +pyexec_LTLIBRARIES = _babeltrace.la
> +
> +MAINTAINERCLEANFILES = babeltrace_wrap.c babeltrace.py
> +
> +_babeltrace_la_SOURCES = babeltrace_wrap.c python-complements.c
> +
> +_babeltrace_la_LDFLAGS = -module
> +
> +_babeltrace_la_CFLAGS = $(GLIB_CFLAGS) $(AM_CFLAGS)
> +
> +_babeltrace_la_LIBS = $(GLIB_LIBS)
> +
> +_babeltrace_la_LIBADD = $(top_srcdir)/formats/ctf/libbabeltrace-ctf.la \
> + $(top_srcdir)/formats/ctf-text/libbabeltrace-ctf-text.la
> +
> +# SWIG 'warning md variable unused' fixed after SWIG build:
> +babeltrace_wrap.c: babeltrace.i
> + $(SWIG) -python -Wall -I. -I$(top_srcdir)/include babeltrace.i
> + sed -i "s/PyObject \*m, \*d, \*md;/PyObject \*m, \*d;\n#if defined(SWIGPYTHON_BUILTIN)\nPyObject *md;\n#endif/g" babeltrace_wrap.c
> + sed -i "s/md = d/d/g" babeltrace_wrap.c
> + sed -i "s/(void)public_symbol;/(void)public_symbol;\n md = d;/g" babeltrace_wrap.c
> diff --git a/bindings/python/babeltrace.i.in b/bindings/python/babeltrace.i.in
> new file mode 100644
> index 0000000..49636d1
> --- /dev/null
> +++ b/bindings/python/babeltrace.i.in
> @@ -0,0 +1,1079 @@
> +/* BABELTRACE PYTHON MODULE interface file */
> +
> +%define DOCSTRING
> +"BABELTRACE_VERSION_STR
> +
> +Babeltrace is a trace viewer and converter reading and writing the
> +Common Trace Format (CTF). Its main use is to pretty-print CTF
> +traces into a human-readable text output.
> +
> +To use this module, the first step is to create a Context and add a
> +trace to it."
> +%enddef
> +
> +%module(docstring=DOCSTRING) babeltrace
> +
> +%include "typemaps.i"
> +%{
> +#define SWIG_FILE_WITH_INIT
> +#include <babeltrace/babeltrace.h>
> +#include <babeltrace/babeltrace-internal.h>
> +#include <babeltrace/trace-handle.h>
> +#include <babeltrace/trace-handle-internal.h>
> +#include <babeltrace/context.h>
> +#include <babeltrace/context-internal.h>
> +#include <babeltrace/iterator.h>
> +#include <babeltrace/iterator-internal.h>
> +#include <babeltrace/format.h>
> +#include <babeltrace/list.h>
> +#include <babeltrace/uuid.h>
> +#include <babeltrace/types.h>
> +#include <babeltrace/ctf/iterator.h>
> +#include "python-complements.h"
> +%}
> +
> +typedef unsigned long long uint64_t;
> +typedef long long int64_t;
> +typedef int bt_intern_str;
> +
> +/* =================================================================
> + CONTEXT.H, CONTEXT-INTERNAL.H
> + ¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯
> +*/
> +
> +%rename("_bt_context_create") bt_context_create(void);
> +%rename("_bt_context_add_trace") bt_context_add_trace(
> + struct bt_context *ctx, const char *path, const char *format,
> + void (*packet_seek)(struct stream_pos *pos, size_t index, int whence),
> + struct mmap_stream_list *stream_list, FILE *metadata);
> +%rename("_bt_context_remove_trace") bt_context_remove_trace(
> + struct bt_context *ctx, int trace_id);
> +%rename("_bt_context_get") bt_context_get(struct bt_context *ctx);
> +%rename("_bt_context_put") bt_context_put(struct bt_context *ctx);
> +%rename("_bt_ctf_event_get_context") bt_ctf_event_get_context(
> + const struct bt_ctf_event *event);
> +
> +struct bt_context *bt_context_create(void);
> +int bt_context_add_trace(struct bt_context *ctx, const char *path, const char *format,
> + void (*packet_seek)(struct stream_pos *pos, size_t index, int whence),
> + struct mmap_stream_list *stream_list, FILE *metadata);
> +void bt_context_remove_trace(struct bt_context *ctx, int trace_id);
> +void bt_context_get(struct bt_context *ctx);
> +void bt_context_put(struct bt_context *ctx);
> +struct bt_context *bt_ctf_event_get_context(const struct bt_ctf_event *event);
> +
> +// class Context to prevent direct access to struct bt_context
> +%pythoncode%{
> +class Context:
> + """
> + The context represents the object in which a trace_collection is
> + open. As long as this structure is allocated, the trace_collection
> + is open and the traces it contains can be read and seeked by the
> + iterators and callbacks.
> + """
> +
> + def __init__(self):
> + self._c = _bt_context_create()
> +
> + def __del__(self):
> + _bt_context_put(self._c)
> +
> + def add_trace(self, path, format_str,
> + packet_seek=None, stream_list=None, metadata=None):
> + """
> + Add a trace by path to the context.
> +
> + Open a trace.
> +
> + path is the path to the trace, it is not recursive.
> + If "path" is None, stream_list is used instead as a list
> + of mmap streams to open for the trace.
> +
> + format is a string containing the format name in which the trace was
> + produced.
> +
> + packet_seek is not implemented for Python. Should be left None to
> + use the default packet_seek handler provided by the trace format.
> +
> + stream_list is a linked list of streams, it is used to open a trace
> + where the trace data is located in memory mapped areas instead of
> + trace files, this argument should be None when path is not None.
> +
> + The metadata parameter acts as a metadata override when not None,
> + otherwise the format handles the metadata opening.
> +
> + Return: the corresponding TraceHandle on success or None on error.
> + """
> + if metadata is not None:
> + metadata = metadata._file
> +
> + ret = _bt_context_add_trace(self._c, path, format_str, packet_seek,
> + stream_list, metadata)
> + if ret < 0:
> + return None
> +
> + th = TraceHandle.__new__(TraceHandle)
> + th._id = ret
> + return th
> +
> + def add_traces_recursive(self, path, format_str):
> + """
> + Open a trace recursively.
> +
> + Find each trace present in the subdirectory starting from the given
> + path, and add them to the context.
> +
> + Return a dict of TraceHandle instances (the full path is the key).
> + Return None on error.
> + """
> +
> + import os
> +
> + trace_handles = {}
> +
> + noTrace = True
> + error = False
> +
> + for fullpath, dirs, files in os.walk(path):
> + if "metadata" in files:
> + trace_handle = self.add_trace(fullpath, format_str)
> + if trace_handle is None:
> + error = True
> + continue
> +
> + trace_handles[fullpath] = trace_handle
> + noTrace = False
> +
> + if noTrace and error:
> + return None
> + return trace_handles
> +
> + def remove_trace(self, trace_handle):
> + """
> + Remove a trace from the context.
> + Effectively closing the trace.
> + """
> + try:
> + _bt_context_remove_trace(self._c, trace_handle._id)
> + except AttributeError:
> + raise TypeError("in remove_trace, "
> + "argument 2 must be a TraceHandle instance")
> +%}
> +
> +
> +
> +/* =================================================================
> + FORMAT.H, REGISTRY
> + ¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯
> +*/
> +
> +%rename("lookup_format") bt_lookup_format(bt_intern_str qname);
> +%rename("_bt_print_format_list") bt_fprintf_format_list(FILE *fp);
> +%rename("register_format") bt_register_format(struct format *format);
> +
> +extern struct format *bt_lookup_format(bt_intern_str qname);
> +extern void bt_fprintf_format_list(FILE *fp);
> +extern int bt_register_format(struct format *format);
> +
> +void format_init(void);
> +void format_finalize(void);
> +
> +%pythoncode %{
> +
> +def print_format_list(babeltrace_file):
> + """
> + Print a list of available formats to file.
> +
> + babeltrace_file must be a File instance opened in write mode.
> + """
> + try:
> + if babeltrace_file._file is not None:
> + _bt_print_format_list(babeltrace_file._file)
> + except AttributeError:
> + raise TypeError("in print_format_list, "
> + "argument 1 must be a File instance")
> +
> +%}
> +
> +
> +/* =================================================================
> + ITERATOR.H, ITERATOR-INTERNAL.H
> + ¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯
> +*/
> +
> +%rename("_bt_iter_create") bt_iter_create(struct bt_context *ctx,
> + const struct bt_iter_pos *begin_pos, const struct bt_iter_pos *end_pos);
> +%rename("_bt_iter_destroy") bt_iter_destroy(struct bt_iter *iter);
> +%rename("_bt_iter_next") bt_iter_next(struct bt_iter *iter);
> +%rename("_bt_iter_get_pos") bt_iter_get_pos(struct bt_iter *iter);
> +%rename("_bt_iter_free_pos") bt_iter_free_pos(struct bt_iter_pos *pos);
> +%rename("_bt_iter_set_pos") bt_iter_set_pos(struct bt_iter *iter,
> + const struct bt_iter_pos *pos);
> +%rename("_bt_iter_create_time_pos") bt_iter_create_time_pos(struct bt_iter *iter,
> + uint64_t timestamp);
> +
> +struct bt_iter *bt_iter_create(struct bt_context *ctx,
> + const struct bt_iter_pos *begin_pos, const struct bt_iter_pos *end_pos);
> +void bt_iter_destroy(struct bt_iter *iter);
> +int bt_iter_next(struct bt_iter *iter);
> +struct bt_iter_pos *bt_iter_get_pos(struct bt_iter *iter);
> +void bt_iter_free_pos(struct bt_iter_pos *pos);
> +int bt_iter_set_pos(struct bt_iter *iter, const struct bt_iter_pos *pos);
> +struct bt_iter_pos *bt_iter_create_time_pos(struct bt_iter *iter, uint64_t timestamp);
> +
> +%rename("_bt_iter_pos") bt_iter_pos;
> +%rename("SEEK_TIME") BT_SEEK_TIME;
> +%rename("SEEK_RESTORE") BT_SEEK_RESTORE;
> +%rename("SEEK_CUR") BT_SEEK_CUR;
> +%rename("SEEK_BEGIN") BT_SEEK_BEGIN;
> +%rename("SEEK_END") BT_SEEK_END;
> +
> +
> +// This struct is taken from iterator.h
> +// All changes to the struct must also be made here
> +struct bt_iter_pos {
> + enum {
> + BT_SEEK_TIME, /* uses u.seek_time */
> + BT_SEEK_RESTORE, /* uses u.restore */
> + BT_SEEK_CUR,
> + BT_SEEK_BEGIN,
> + BT_SEEK_END,
> + } type;
> + union {
> + uint64_t seek_time;
> + struct bt_saved_pos *restore;
> + } u;
> +};
> +
> +
> +%pythoncode%{
> +
> +class IterPos:
> + """This class represents the position where to set an iterator."""
> +
> + __can_access = False
> +
> + def __init__(self, seek_type, seek_time = None):
> + """
> + seek_type represents the type of seek to use.
> + seek_time is the timestamp to seek to when using SEEK_TIME, it
> + is expressed in nanoseconds
> + Only use SEEK_RESTORE on IterPos obtained from the get_pos function
> + of the Iterator class.
> + """
> +
> + self._pos = _bt_iter_pos()
> + self._pos.type = seek_type
> + if seek_time and seek_type == SEEK_TIME:
> + self._pos.u.seek_time = seek_time
> + self.__can_access = True
> +
> + def __del__(self):
> + if not self.__can_access:
> + _bt_iter_free_pos(self._pos)
> +
> + def _get_type(self):
> + if not self.__can_access:
> + raise AttributeError("seek_type is not available")
> + return self._pos.type
> +
> + def _set_type(self, seek_type):
> + if not self.__can_access:
> + raise AttributeError("seek_type is not available")
> + self._pos.type = seek_type
> +
> + def _get_time(self):
> + if not self.__can_access:
> + raise AttributeError("seek_time is not available")
> +
> + elif self._pos.type is not SEEK_TIME:
> + raise TypeError("seek_type is not SEEK_TIME")
> +
> + return self._pos.u.seek_time
> +
> + def _set_time(self, time):
> + if not self.__can_access:
> + raise AttributeError("seek_time is not available")
> +
> + elif self._pos.type is not SEEK_TIME:
> + raise TypeError("seek_type is not SEEK_TIME")
> +
> + self._pos.u.seek_time = time
> +
> + def _get_pos(self):
> + return self._pos
> +
> +
> + seek_type = property(_get_type, _set_type)
> + seek_time = property(_get_time, _set_time)
> +
> +
> +class Iterator:
> +
> + __with_init = False
> +
> + def __init__(self, context, begin_pos = None, end_pos = None, _no_init = None):
> + """
> + Allocate a trace collection iterator.
> +
> + begin_pos and end_pos are optional parameters to specify the
> + position at which the trace collection should be seeked upon
> + iterator creation, and the position at which iteration will
> + start returning "EOF".
> +
> + By default, if begin_pos is None, a BT_SEEK_CUR is performed at
> + creation. By default, if end_pos is None, a BT_SEEK_END (end of
> + trace) is the EOF criterion.
> + """
> + if _no_init is None:
> + if begin_pos is None:
> + bp = None
> + else:
> + try:
> + bp = begin_pos._pos
> + except AttributeError:
> + raise TypeError("in __init__, "
> + "argument 3 must be a IterPos instance")
> +
> + if end_pos is None:
> + ep = None
> + else:
> + try:
> + ep = end_pos._pos
> + except AttributeError:
> + raise TypeError("in __init__, "
> + "argument 4 must be a IterPos instance")
> +
> + try:
> + self._bi = _bt_iter_create(context._c, bp, ep)
> + except AttributeError:
> + raise TypeError("in __init__, "
> + "argument 2 must be a Context instance")
> +
> + self.__with_init = True
> +
> + else:
> + self._bi = _no_init
> +
> + def __del__(self):
> + if self.__with_init:
> + _bt_iter_destroy(self._bi)
> +
> + def next(self):
> + """
> + Move trace collection position to the next event.
> + Returns 0 on success, a negative value on error.
> + """
> + return _bt_iter_next(self._bi)
> +
> + def get_pos(self):
> + """Return a IterPos class of the current iterator position."""
> + ret = IterPos(0)
> + ret.__can_access = False
> + ret._pos = _bt_iter_get_pos(self._bi)
> + return ret
> +
> + def set_pos(self, pos):
> + """
> + Move the iterator to a given position.
> +
> + On error, the stream_heap is reinitialized and returned empty.
> + Return 0 for success.
> + Return EOF if the position requested is after the last event of the
> + trace collection.
> + Return -EINVAL when called with invalid parameter.
> + Return -ENOMEM if the stream_heap could not be properly initialized.
> + """
> + try:
> + return _bt_iter_set_pos(self._bi, pos._pos)
> + except AttributeError:
> + raise TypeError("in set_pos, "
> + "argument 2 must be a IterPos instance")
> +
> + def create_time_pos(self, timestamp):
> + """
> + Create a position based on time
> + This function allocates and returns a new IterPos to be able to
> + restore an iterator position based on a timestamp.
> + """
> +
> + if timestamp < 0:
> + raise TypeError("timestamp must be an unsigned int")
> +
> + ret = IterPos(0)
> + ret.__can_access = False
> + ret._pos = _bt_iter_create_time_pos(self._bi, timestamp)
> + return ret
> +%}
> +
> +
> +/* =================================================================
> + CLOCK-TYPE.H
> + ¯¯¯¯¯¯¯¯¯¯¯¯
> + *** Enum copied from clock-type.h
> + All changes must also be made here
> +*/
> +%rename("CLOCK_CYCLES") BT_CLOCK_CYCLES;
> +%rename("CLOCK_REAL") BT_CLOCK_REAL;
> +
> +enum bt_clock_type {
> + BT_CLOCK_CYCLES = 0,
> + BT_CLOCK_REAL
> +};
> +
> +/* =================================================================
> + TRACE-HANDLE.H, TRACE-HANDLE-INTERNAL.H
> + ¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯
> +*/
> +
> +%rename("_bt_trace_handle_create") bt_trace_handle_create(struct bt_context *ctx);
> +%rename("_bt_trace_handle_destroy") bt_trace_handle_destroy(struct bt_trace_handle *bt);
> +struct bt_trace_handle *bt_trace_handle_create(struct bt_context *ctx);
> +void bt_trace_handle_destroy(struct bt_trace_handle *bt);
> +
> +%rename("_bt_trace_handle_get_path") bt_trace_handle_get_path(struct bt_context *ctx,
> + int handle_id);
> +%rename("_bt_trace_handle_get_timestamp_begin") bt_trace_handle_get_timestamp_begin(
> + struct bt_context *ctx, int handle_id, enum bt_clock_type type);
> +%rename("_bt_trace_handle_get_timestamp_end") bt_trace_handle_get_timestamp_end(
> + struct bt_context *ctx, int handle_id, enum bt_clock_type type);
> +%rename("_bt_trace_handle_get_id") bt_trace_handle_get_id(struct bt_trace_handle *th);
> +int bt_trace_handle_get_id(struct bt_trace_handle *th);
> +const char *bt_trace_handle_get_path(struct bt_context *ctx, int handle_id);
> +uint64_t bt_trace_handle_get_timestamp_begin(struct bt_context *ctx, int handle_id,
> + enum bt_clock_type type);
> +uint64_t bt_trace_handle_get_timestamp_end(struct bt_context *ctx, int handle_id,
> + enum bt_clock_type type);
> +
> +%rename("_bt_ctf_event_get_handle_id") bt_ctf_event_get_handle_id(
> + const struct bt_ctf_event *event);
> +int bt_ctf_event_get_handle_id(const struct bt_ctf_event *event);
> +
> +
> +%pythoncode%{
> +
> +class TraceHandle(object):
> + """
> + The TraceHandle allows the user to manipulate a trace file directly.
> + It is a unique identifier representing a trace file.
> + Do not instantiate.
> + """
> +
> + def __init__(self):
> + raise NotImplementedError("TraceHandle cannot be instantiated")
> +
> + def __repr__(self):
> + return "Babeltrace TraceHandle: trace_id('{}')".format(self._id)
> +
> + def get_id(self):
> + """Return the TraceHandle id."""
> + return self._id
> +
> + def get_path(self, context):
> + """Return the path of a TraceHandle."""
> + try:
> + return _bt_trace_handle_get_path(context._c, self._id)
> + except AttributeError:
> + raise TypeError("in get_path, "
> + "argument 2 must be a Context instance")
> +
> + def get_timestamp_begin(self, context, clock_type):
> + """Return the creation time of the buffers of a trace."""
> + try:
> + return _bt_trace_handle_get_timestamp_begin(
> + context._c, self._id, clock_type)
> + except AttributeError:
> + raise TypeError("in get_timestamp_begin, "
> + "argument 2 must be a Context instance")
> +
> + def get_timestamp_end(self, context, clock_type):
> + """Return the destruction timestamp of the buffers of a trace."""
> + try:
> + return _bt_trace_handle_get_timestamp_end(
> + context._c, self._id, clock_type)
> + except AttributeError:
> + raise TypeError("in get_timestamp_end, "
> + "argument 2 must be a Context instance")
> +
> +%}
> +
> +
> +
> +// =================================================================
> +// CTF
> +// =================================================================
> +
> +/* =================================================================
> + ITERATOR.H, EVENTS.H
> + ¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯
> +*/
> +
> +//Iterator
> +%rename("_bt_ctf_iter_create") bt_ctf_iter_create(struct bt_context *ctx,
> + const struct bt_iter_pos *begin_pos,
> + const struct bt_iter_pos *end_pos);
> +%rename("_bt_ctf_get_iter") bt_ctf_get_iter(struct bt_ctf_iter *iter);
> +%rename("_bt_ctf_iter_destroy") bt_ctf_iter_destroy(struct bt_ctf_iter *iter);
> +%rename("_bt_ctf_iter_read_event") bt_ctf_iter_read_event(struct bt_ctf_iter *iter);
> +
> +struct bt_ctf_iter *bt_ctf_iter_create(struct bt_context *ctx,
> + const struct bt_iter_pos *begin_pos,
> + const struct bt_iter_pos *end_pos);
> +struct bt_iter *bt_ctf_get_iter(struct bt_ctf_iter *iter);
> +void bt_ctf_iter_destroy(struct bt_ctf_iter *iter);
> +struct bt_ctf_event *bt_ctf_iter_read_event(struct bt_ctf_iter *iter);
> +
> +
> +//Events
> +
> +%rename("_bt_ctf_get_top_level_scope") bt_ctf_get_top_level_scope(const struct
> + bt_ctf_event *event, enum bt_ctf_scope scope);
> +%rename("_bt_ctf_event_name") bt_ctf_event_name(const struct bt_ctf_event *ctf_event);
> +%rename("_bt_ctf_get_timestamp") bt_ctf_get_timestamp(
> + const struct bt_ctf_event *ctf_event);
> +%rename("_bt_ctf_get_cycles") bt_ctf_get_cycles(
> + const struct bt_ctf_event *ctf_event);
> +
> +%rename("_bt_ctf_get_field") bt_ctf_get_field(const struct bt_ctf_event *ctf_event,
> + const struct definition *scope, const char *field);
> +%rename("_bt_ctf_get_index") bt_ctf_get_index(const struct bt_ctf_event *ctf_event,
> + const struct definition *field, unsigned int index);
> +%rename("_bt_ctf_field_name") bt_ctf_field_name(const struct definition *def);
> +%rename("_bt_ctf_field_type") bt_ctf_field_type(const struct definition *def);
> +%rename("_bt_ctf_get_int_signedness") bt_ctf_get_int_signedness(
> + const struct definition *field);
> +%rename("_bt_ctf_get_int_base") bt_ctf_get_int_base(const struct definition *field);
> +%rename("_bt_ctf_get_int_byte_order") bt_ctf_get_int_byte_order(
> + const struct definition *field);
> +%rename("_bt_ctf_get_int_len") bt_ctf_get_int_len(const struct definition *field);
> +%rename("_bt_ctf_get_encoding") bt_ctf_get_encoding(const struct definition *field);
> +%rename("_bt_ctf_get_array_len") bt_ctf_get_array_len(const struct definition *field);
> +%rename("_bt_ctf_get_uint64") bt_ctf_get_uint64(const struct definition *field);
> +%rename("_bt_ctf_get_int64") bt_ctf_get_int64(const struct definition *field);
> +%rename("_bt_ctf_get_char_array") bt_ctf_get_char_array(const struct definition *field);
> +%rename("_bt_ctf_get_string") bt_ctf_get_string(const struct definition *field);
> +%rename("_bt_ctf_field_get_error") bt_ctf_field_get_error(void);
> +%rename("_bt_ctf_get_decl_event_name") bt_ctf_get_decl_event_name(const struct
> + bt_ctf_event_decl *event);
> +%rename("_bt_ctf_get_decl_field_name") bt_ctf_get_decl_field_name(
> + const struct bt_ctf_field_decl *field);
> +
> +const struct definition *bt_ctf_get_top_level_scope(const struct bt_ctf_event *ctf_event,
> + enum bt_ctf_scope scope);
> +const char *bt_ctf_event_name(const struct bt_ctf_event *ctf_event);
> +uint64_t bt_ctf_get_timestamp(const struct bt_ctf_event *ctf_event);
> +uint64_t bt_ctf_get_cycles(const struct bt_ctf_event *ctf_event);
> +const struct definition *bt_ctf_get_field(const struct bt_ctf_event *ctf_event,
> + const struct definition *scope,
> + const char *field);
> +const struct definition *bt_ctf_get_index(const struct bt_ctf_event *ctf_event,
> + const struct definition *field,
> + unsigned int index);
> +const char *bt_ctf_field_name(const struct definition *def);
> +enum ctf_type_id bt_ctf_field_type(const struct definition *def);
> +int bt_ctf_get_int_signedness(const struct definition *field);
> +int bt_ctf_get_int_base(const struct definition *field);
> +int bt_ctf_get_int_byte_order(const struct definition *field);
> +ssize_t bt_ctf_get_int_len(const struct definition *field);
> +enum ctf_string_encoding bt_ctf_get_encoding(const struct definition *field);
> +int bt_ctf_get_array_len(const struct definition *field);
> +uint64_t bt_ctf_get_uint64(const struct definition *field);
> +int64_t bt_ctf_get_int64(const struct definition *field);
> +char *bt_ctf_get_char_array(const struct definition *field);
> +char *bt_ctf_get_string(const struct definition *field);
> +int bt_ctf_field_get_error(void);
> +const char *bt_ctf_get_decl_event_name(const struct bt_ctf_event_decl *event);
> +const char *bt_ctf_get_decl_field_name(const struct bt_ctf_field_decl *field);
> +
> +%pythoncode%{
> +
> +class ctf:
> +
> + #enum equivalent, accessible constants
> + #These are taken directly from ctf/events.h
> + #All changes to enums must also be made here
> + class type_id:
> + UNKNOWN = 0
> + INTEGER = 1
> + FLOAT = 2
> + ENUM = 3
> + STRING = 4
> + STRUCT = 5
> + UNTAGGED_VARIANT = 6
> + VARIANT = 7
> + ARRAY = 8
> + SEQUENCE = 9
> + NR_CTF_TYPES = 10
> +
> + class scope:
> + TRACE_PACKET_HEADER = 0
> + STREAM_PACKET_CONTEXT = 1
> + STREAM_EVENT_HEADER = 2
> + STREAM_EVENT_CONTEXT = 3
> + EVENT_CONTEXT = 4
> + EVENT_FIELDS = 5
> +
> + class string_encoding:
> + NONE = 0
> + UTF8 = 1
> + ASCII = 2
> + UNKNOWN = 3
> +
> + class Iterator(Iterator, object):
> + """
> + Allocate a CTF trace collection iterator.
> +
> + begin_pos and end_pos are optional parameters to specify the
> + position at which the trace collection should be seeked upon
> + iterator creation, and the position at which iteration will
> + start returning "EOF".
> +
> + By default, if begin_pos is None, a SEEK_CUR is performed at
> + creation. By default, if end_pos is None, a SEEK_END (end of
> + trace) is the EOF criterion.
> +
> + Only one iterator can be created against a context. If more than one
> + iterator is being created for the same context, the second creation
> + will return None. The previous iterator must be destroyed before
> + creation of the new iterator for this function to succeed.
> + """
> +
> + def __new__(cls, context, begin_pos = None, end_pos = None):
> + # __new__ is used to control the return value
> + # as the ctf.Iterator class should return None
> + # if bt_ctf_iter_create returns NULL
> +
> + if begin_pos is None:
> + bp = None
> + else:
> + bp = begin_pos._pos
> + if end_pos is None:
> + ep = None
> + else:
> + ep = end_pos._pos
> + try:
> + it = _bt_ctf_iter_create(context._c, bp, ep)
> + except AttributeError:
> + raise TypeError("in __init__, "
> + "argument 2 must be a Context instance")
> + if it is None:
> + return None
> +
> + ret_class = super(ctf.Iterator, cls).__new__(cls)
> + ret_class._i = it
> + return ret_class
> +
> + def __init__(self, context, begin_pos = None, end_pos = None):
> + Iterator.__init__(self, None, None, None,
> + _bt_ctf_get_iter(self._i))
> +
> + def __del__(self):
> + _bt_ctf_iter_destroy(self._i)
> +
> + def read_event(self):
> + """
> + Read the iterator's current event data.
> + Return current event on success, None on end of trace.
> + """
> + ret = _bt_ctf_iter_read_event(self._i)
> + if ret is None:
> + return ret
> + ev = ctf.Event.__new__(ctf.Event)
> + ev._e = ret
> + return ev
> +
> +
> + class Event(object):
> + """
> + This class represents an event from the trace.
> + It is obtained with read_event() from ctf.Iterator.
> + Do not instantiate.
> + """
> +
> + def __init__(self):
> + raise NotImplementedError("ctf.Event cannot be instantiated")
> +
> + def get_top_level_scope(self, scope):
> + """
> + Return a definition of the top-level scope
> + Top-level scopes are defined in ctf.scope.
> + In order to get a field or a field list, the user needs to pass a
> + scope as argument, this scope can be a top-level scope or a scope
> + relative to an arbitrary field. This function provides the mapping
> + between the scope and the actual definition of top-level scopes.
> + On error return None.
> + """
> + evDef = ctf.Definition.__new__(ctf.Definition)
> + evDef._d = _bt_ctf_get_top_level_scope(self._e, scope)
> + if evDef._d is None:
> + return None
> + return evDef
> +
> + def get_name(self):
> + """Return the name of the event or None on error."""
> + return _bt_ctf_event_name(self._e)
> +
> + def get_cycles(self):
> + """
> + Return the timestamp of the event as written in
> + the packet (in cycles) or -1ULL on error.
> + """
> + return _bt_ctf_get_cycles(self._e)
> +
> + def get_timestamp(self):
> + """
> + Return the timestamp of the event offset by the
> + system clock source or -1ULL on error.
> + """
> + return _bt_ctf_get_timestamp(self._e)
> +
> + def get_field(self, scope, field):
> + """Return the definition of a specific field."""
> + evDef = ctf.Definition.__new__(ctf.Definition)
> + try:
> + evDef._d = _bt_ctf_get_field(self._e, scope._d, field)
> + except AttributeError:
> + raise TypeError("in get_field, argument 2 must be a "
> + "Definition (scope) instance")
> + return evDef
> +
> + def get_field_list(self, scope):
> + """
> + Return a list of Definitions
> + Return None on error.
> + """
> + try:
> + field_lc = _bt_python_field_listcaller(self._e, scope._d)
> + except AttributeError:
> + raise TypeError("in get_field_list, argument 2 must be a "
> + "Definition (scope) instance")
> +
> + if field_lc is None:
> + return None
> +
> + def_list = []
> + i = 0
> + while True:
> + tmp = ctf.Definition.__new__(ctf.Definition)
> + tmp._d = _bt_python_field_one_from_list(field_lc, i)
> +
> + if tmp._d is None:
> + #Last item of list is None, assured in
> + #_bt_python_field_listcaller
> + break
> +
> + def_list.append(tmp)
> + i += 1
> + return def_list
> +
> + def get_index(self, field, index):
> + """
> + If the field is an array or a sequence, return the element
> + at position index, otherwise return None
> + """
> + evDef = ctf.Definition.__new__(ctf.Definition)
> + try:
> + evDef._d = _bt_ctf_get_index(self._e, field._d, index)
> + except AttributeError:
> + raise TypeError("in get_index, argument 2 must be a "
> + "Definition (field) instance")
> +
> + if evDef._d is None:
> + return None
> + return evDef
> +
> + def get_handle(self):
> + """
> + Get the TraceHandle associated with an event
> + Return None on error
> + """
> + ret = _bt_ctf_event_get_handle_id(self._e)
> + if ret < 0:
> + return None
> +
> + th = TraceHandle.__new__(TraceHandle)
> + th._id = ret
> + return th
> +
> + def get_context(self):
> + """
> + Get the context associated with an event.
> + Return None on error.
> + """
> + ctx = Context()
> + ctx._c = _bt_ctf_event_get_context(self._e)
> + if ctx._c is None:
> + return None
> + else:
> + return ctx
> +
> +
> + class Definition(object):
> + """Definition class. Do not instantiate."""
> +
> + def __init__(self):
> + raise NotImplementedError("ctf.Definition cannot be instantiated")
> +
> + def __repr__(self):
> + return "Babeltrace Definition: name('{}'), type({})".format(
> + self.field_name(), self.field_type())
> +
> + def field_name(self):
> + """Return the name of a field or None on error."""
> + return _bt_ctf_field_name(self._d)
> +
> + def field_type(self):
> + """Return the type of a field or -1 if unknown."""
> + return _bt_ctf_field_type(self._d)
> +
> + def get_int_signedness(self):
> + """
> + Return the signedness of an integer:
> + 0 if unsigned; 1 if signed; -1 on error.
> + """
> + return _bt_ctf_get_int_signedness(self._d)
> +
> + def get_int_base(self):
> + """Return the base of an int or a negative value on error."""
> + return _bt_ctf_get_int_base(self._d)
> +
> + def get_int_byte_order(self):
> + """
> + Return the byte order of an int or a negative
> + value on error.
> + """
> + return _bt_ctf_get_int_byte_order(self._d)
> +
> + def get_int_len(self):
> + """
> + Return the size, in bits, of an int or a negative
> + value on error.
> + """
> + return _bt_ctf_get_int_len(self._d)
> +
> + def get_encoding(self):
> + """
> + Return the encoding of an int or a string.
> + Return a negative value on error.
> + """
> + return _bt_ctf_get_encoding(self._d)
> +
> + def get_array_len(self):
> + """
> + Return the length of an array or a negative
> + value on error.
> + """
> + return _bt_ctf_get_array_len(self._d)
> +
> + def get_uint64(self):
> + """
> + Return the value associated with the field.
> + If the field does not exist or is not of the type requested,
> + the value returned is undefined. To check if an error occurred,
> + use the ctf.field_error() function after accessing a field.
> + """
> + return _bt_ctf_get_uint64(self._d)
> +
> + def get_int64(self):
> + """
> + Return the value associated with the field.
> + If the field does not exist or is not of the type requested,
> + the value returned is undefined. To check if an error occurred,
> + use the ctf.field_error() function after accessing a field.
> + """
> + return _bt_ctf_get_int64(self._d)
> +
> + def get_char_array(self):
> + """
> + Return the value associated with the field.
> + If the field does not exist or is not of the type requested,
> + the value returned is undefined. To check if an error occurred,
> + use the ctf.field_error() function after accessing a field.
> + """
> + return _bt_ctf_get_char_array(self._d)
> +
> + def get_str(self):
> + """
> + Return the value associated with the field.
> + If the field does not exist or is not of the type requested,
> + the value returned is undefined. To check if an error occurred,
> + use the ctf.field_error() function after accessing a field.
> + """
> + return _bt_ctf_get_string(self._d)
> +
> +
> + class EventDecl(object):
> + """Event declaration class. Do not instantiate."""
> +
> + def __init__(self):
> + raise NotImplementedError("ctf.EventDecl cannot be instantiated")
> +
> + def __repr__(self):
> + return "Babeltrace EventDecl: name {}".format(self.get_name())
> +
> + def get_name(self):
> + """Return the name of the event or None on error"""
> + return _bt_ctf_get_decl_event_name(self._d)
> +
> + def get_decl_fields(self, scope):
> + """
> + Return a list of ctf.FieldDecl
> + Return None on error.
> + """
> + ptr_list = _bt_python_field_decl_listcaller(self._d, scope)
> +
> + if ptr_list is None:
> + return None
> +
> + decl_list = []
> + i = 0
> + while True:
> + tmp = ctf.FieldDecl.__new__(ctf.FieldDecl)
> + tmp._d = _bt_python_field_decl_one_from_list(
> + ptr_list, i)
> +
> + if tmp._d is None:
> + #Last item of list is None
> + break
> +
> + decl_list.append(tmp)
> + i += 1
> + return decl_list
> +
> +
> + class FieldDecl(object):
> + """Field declaration class. Do not instantiate."""
> +
> + def __init__(self):
> + raise NotImplementedError("ctf.FieldDecl cannot be instantiated")
> +
> + def __repr__(self):
> + return "Babeltrace FieldDecl: name {}".format(self.get_name())
> +
> + def get_name(self):
> + """Return the name of a FieldDecl or None on error"""
> + return _bt_ctf_get_decl_field_name(self._d)
> +
> +
> + @staticmethod
> + def field_error():
> + """
> + Return the last error code encountered while
> + accessing a field and reset the error flag.
> + Return 0 if no error, a negative value otherwise.
> + """
> + return _bt_ctf_field_get_error()
> +
> + @staticmethod
> + def get_event_decl_list(trace_handle, context):
> + """
> + Return a list of ctf.EventDecl
> + Return None on error.
> + """
> + try:
> + handle_id = trace_handle._id
> + except AttributeError:
> + raise TypeError("in get_event_decl_list, "
> + "argument 1 must be a TraceHandle instance")
> + try:
> + ptr_list = _bt_python_event_decl_listcaller(handle_id, context._c)
> + except AttributeError:
> + raise TypeError("in get_event_decl_list, "
> + "argument 2 must be a Context instance")
> +
> + if ptr_list is None:
> + return None
> +
> + decl_list = []
> + i = 0
> + while True:
> + tmp = ctf.EventDecl.__new__(ctf.EventDecl)
> + tmp._d = _bt_python_decl_one_from_list(ptr_list, i)
> +
> + if tmp._d is None:
> + #Last item of list is None
> + break
> +
> + decl_list.append(tmp)
> + i += 1
> + return decl_list
> +
> +%}
> +
> +
> +
> +// =================================================================
> +// NEW FUNCTIONS
> +// File and list-related
> +// python-complements.h
> +// =================================================================
> +
> +%include python-complements.c
> +
> +%pythoncode %{
> +
> +class File(object):
> + """
> + Open a file for babeltrace.
> +
> + file_path is a string containing the path or None to use the
> + standard output in writing mode.
> +
> + The mode can be 'r', 'w' or 'a' for reading (default), writing or
> + appending. The file will be created if it doesn't exist when
> + opened for writing or appending; it will be truncated when opened
> + for writing. Add a 'b' to the mode for binary files. Add a '+'
> + to the mode to allow simultaneous reading and writing.
> + """
> +
> + def __new__(cls, file_path, mode='r'):
> + # __new__ is used to control the return value
> + # as the File class should return None
> + # if _bt_file_open returns NULL
> +
> + # Type check
> + if file_path is not None and type(file_path) is not str:
> + raise TypeError("in method __init__, argument 2 of type 'str'")
> + if type(mode) is not str:
> + raise TypeError("in method __init__, argument 3 of type 'str'")
> +
> + # Opening file
> + file_ptr = _bt_file_open(file_path, mode)
> + if file_ptr is None:
> + return None
> +
> + # Class instantiation
> + file_inst = super(File, cls).__new__(cls)
> + file_inst._file = file_ptr
> + return file_inst
> +
> + def __init__(self, file_path, mode='r'):
> + self._opened = True
> + self._use_stdout = False
> +
> + if file_path is None:
> + # use stdout
> + file_path = "stdout"
> + mode = 'w'
> + self._use_stdout = True
> +
> + self._file_path = file_path
> + self._mode = mode
> +
> + def __del__(self):
> + self.close()
> +
> + def __repr__(self):
> + if self._opened:
> + stat = 'opened'
> + else:
> + stat = 'closed'
> + return "{} babeltrace File; file_path('{}'), mode('{}')".format(
> + stat, self._file_path, self._mode)
> +
> + def close(self):
> + """Close the file. Is also called using del."""
> + if self._opened and not self._use_stdout:
> + _bt_file_close(self._file)
> + self._opened = False
> +%}
> diff --git a/bindings/python/examples/babeltrace_and_lttng.py b/bindings/python/examples/babeltrace_and_lttng.py
> new file mode 100644
> index 0000000..ef0e35c
> --- /dev/null
> +++ b/bindings/python/examples/babeltrace_and_lttng.py
> @@ -0,0 +1,107 @@
> +# This script uses both lttng-tools and babeltrace
> +# python modules. It creates a session, enables
> +# events, starts tracing for 2 seconds, stops tracing,
> +# destroys the session and outputs the trace in the
> +# specified output file.
> +#
> +# WARNING: will destroy any existing trace having
> +# the same name as ses_name
> +
> +
> +# ------------------------------------------------------
> +ses_name = "babeltrace-lttng-test"
> +trace_path = "/lttng-traces/babeltrace-lttng-trace/"
> +out_file = "babeltrace-lttng-trace-text-output.txt"
> +# ------------------------------------------------------
> +
> +
> +import time
> +try:
> + import babeltrace, lttng
> +except ImportError:
> + raise ImportError( "both babeltrace and lttng-tools "
> + "python modules must be installed" )
> +
> +
> +# Errors to raise if something goes wrong
> +class LTTngError(Exception):
> + pass
> +class BabeltraceError(Exception):
> + pass
> +
> +
> +# LTTNG-TOOLS
> +
> +# Making sure session does not already exist
> +lttng.destroy(ses_name)
> +
> +# Creating a new session and handle
> +ret = lttng.create(ses_name,trace_path)
> +if ret < 0:
> + raise LTTngError(lttng.strerror(ret))
> +
> +han = None
> +han = lttng.Handle(ses_name, lttng.Domain())
> +if han is None:
> + raise LTTngError("Handle not created")
> +
> +
> +# Enabling all events
> +ret = lttng.enable_event(han, lttng.Event(), None)
> +if ret < 0:
> + raise LTTngError(lttng.strerror(ret))
> +
> +
> +# Start, wait, stop
> +ret = lttng.start(ses_name)
> +if ret < 0:
> + raise LTTngError(lttng.strerror(ret))
> +print("Tracing...")
> +time.sleep(2)
> +print("Stopped.")
> +ret = lttng.stop(ses_name)
> +if ret < 0:
> + raise LTTngError(lttng.strerror(ret))
> +
> +
> +# Destroying tracing session
> +ret = lttng.destroy(ses_name)
> +if ret < 0:
> + raise LTTngError(lttng.strerror(ret))
> +
> +
> +# BABELTRACE
> +
> +# Create context and add trace:
> +ctx = babeltrace.Context()
> +ret = ctx.add_trace(trace_path + "/kernel", "ctf")
> +if ret is None:
> + raise BabeltraceError("Error adding trace")
> +
> +# Iterator setup
> +bp = babeltrace.IterPos(babeltrace.SEEK_BEGIN)
> +ctf_it = babeltrace.ctf.Iterator(ctx,bp)
> +
> +# Reading events from trace
> +# and outputting timestamps and event names
> +# in out_file
> +print("Writing trace file...")
> +output = open(out_file, "wt")
> +
> +event = ctf_it.read_event()
> +while(event is not None):
> + output.write("TS: {}, {} : {}\n".format(event.get_timestamp(),
> + event.get_cycles(), event.get_name()))
> +
> + # Next event
> + ret = ctf_it.next()
> + if ret < 0:
> + break
> + event = ctf_it.read_event()
> +
> +# Closing file
> +output.close()
> +
> +# Destroying dynamic elements
> +del ctf_it, han
> +print("Done.")
> diff --git a/bindings/python/examples/eventcount.py b/bindings/python/examples/eventcount.py
> new file mode 100644
> index 0000000..2a63b74
> --- /dev/null
> +++ b/bindings/python/examples/eventcount.py
> @@ -0,0 +1,66 @@
> +# The script prints a count of specified events and
> +# their related TIDs in a given trace.
> +# The trace needs TID context (lttng add-context -k -t tid)
> +
> +import sys
> +from babeltrace import *
> +from output_format_modules.pprint_table import pprint_table as pprint
> +
> +if len(sys.argv) < 3:
> + raise TypeError("Usage: python eventcount.py event1 [event2 ...] path/to/trace")
> +
> +ctx = Context()
> +ret = ctx.add_trace(sys.argv[len(sys.argv)-1], "ctf")
> +if ret is None:
> + raise IOError("Error adding trace")
> +
> +counts = {}
> +
> +# Setting iterator
> +bp = IterPos(SEEK_BEGIN)
> +ctf_it = ctf.Iterator(ctx, bp)
> +
> +# Reading events
> +event = ctf_it.read_event()
> +while(event is not None):
> + for event_type in sys.argv[1:len(sys.argv)-1]:
> + if event_type == event.get_name():
> +
> + # Getting scope definition
> + sco = event.get_top_level_scope(ctf.scope.STREAM_EVENT_CONTEXT)
> + if sco is None:
> + print("ERROR: Cannot get definition scope for {}".format(
> + event.get_name()))
> + continue
> +
> + # Getting TID
> + tid_field = event.get_field(sco, "_tid")
> + tid = tid_field.get_int64()
> +
> + if ctf.field_error():
> + print("ERROR: Missing TID info for {}".format(
> + event.get_name()))
> + continue
> +
> + tmp = (tid, event.get_name())
> +
> + if tmp in counts:
> + counts[tmp] += 1
> + else:
> + counts[tmp] = 1
> +
> + # Next event
> + ret = ctf_it.next()
> + if ret < 0:
> + break
> + event = ctf_it.read_event()
> +
> +del ctf_it
> +
> +# Appending data to table for output
> +table = []
> +for item in counts:
> + table.append([item[0], item[1], counts[item]])
> +table = sorted(table)
> +table.insert(0,["TID", "EVENT", "COUNT"])
> +pprint(table, 2)
> diff --git a/bindings/python/examples/eventcountlist.py b/bindings/python/examples/eventcountlist.py
> new file mode 100644
> index 0000000..800996c
> --- /dev/null
> +++ b/bindings/python/examples/eventcountlist.py
> @@ -0,0 +1,65 @@
> +# The script prints a count and rate of events.
> +# It also outputs a bar graph of count per event, using the cairoplot module.
> +
> +import sys
> +from babeltrace import *
> +from output_format_modules import cairoplot
> +from output_format_modules.pprint_table import pprint_table as pprint
> +
> +# Check for path arg:
> +if len(sys.argv) < 2:
> + raise TypeError("Usage: python eventcountlist.py path/to/trace")
> +
> +ctx = Context()
> +ret = ctx.add_trace(sys.argv[1], "ctf")
> +if ret is None:
> + raise IOError("Error adding trace")
> +
> +# Events and their associated count
> +# will be stored as a dict:
> +events_count = {}
> +
> +# Setting iterator:
> +bp = IterPos(SEEK_BEGIN)
> +ctf_it = ctf.Iterator(ctx,bp)
> +
> +prev_event = None
> +event = ctf_it.read_event()
> +
> +start_time = event.get_timestamp()
> +
> +# Reading events:
> +while(event is not None):
> + if event.get_name() in events_count:
> + events_count[event.get_name()] += 1
> + else:
> + events_count[event.get_name()] = 1
> +
> + ret = ctf_it.next()
> + if ret < 0:
> + break
> + else:
> + prev_event = event
> + event = ctf_it.read_event()
> +
> +if event:
> + total_time = event.get_timestamp() - start_time
> +else:
> + total_time = prev_event.get_timestamp() - start_time
> +
> +del ctf_it
> +
> +# Printing encountered events with respective count and rate:
> +print("Total time: {} ns".format(total_time))
> +table = [["EVENT", "COUNT", "RATE (Hz)"]]
> +for item in sorted(events_count.iterkeys()):
> + tmp = [item, events_count[item],
> + events_count[item]/(total_time/1000000000.0)]
> + table.append(tmp)
> +pprint(table)
> +
> +# Exporting data as bar graph
> +cairoplot.vertical_bar_plot ( 'eventcountlist.svg', events_count, 50+85*len(events_count),
> + 800, border = 20, display_values = True, grid = True,
> + rounded_corners = True,
> + x_labels = sorted(events_count.keys()) )
> diff --git a/bindings/python/examples/events_per_cpu.py b/bindings/python/examples/events_per_cpu.py
> new file mode 100644
> index 0000000..09cf0e3
> --- /dev/null
> +++ b/bindings/python/examples/events_per_cpu.py
> @@ -0,0 +1,81 @@
> +# The script opens a trace and prints out CPU statistics
> +# for the given trace (event count per CPU, total active
> +# time and % of time processing events).
> +# It also outputs a .txt file showing each time interval
> +# (since the beginning of the trace) in which each CPU
> +# was active and the corresponding event.
> +
> +import sys, multiprocessing
> +from output_format_modules.pprint_table import pprint_table as pprint
> +from babeltrace import *
> +
> +if len(sys.argv) < 2:
> + raise TypeError("Usage: python events_per_cpu.py path/to/trace")
> +
> +# Adding trace
> +ctx = Context()
> +ret = ctx.add_trace(sys.argv[1], "ctf")
> +if ret is None:
> + raise IOError("Error adding trace")
> +
> +cpu_usage = []
> +nbEvents = 0
> +i = 0
> +while i < multiprocessing.cpu_count():
> + cpu_usage.append([])
> + i += 1
> +
> +# Setting iterator
> +bp = IterPos(SEEK_BEGIN)
> +ctf_it = ctf.Iterator(ctx, bp)
> +
> +# Reading events
> +event = ctf_it.read_event()
> +start_time = event.get_timestamp()
> +
> +while(event is not None):
> +
> + event_name = event.get_name()
> + ts = event.get_timestamp()
> +
> + # Getting cpu_id
> + scope = event.get_top_level_scope(ctf.scope.STREAM_PACKET_CONTEXT)
> + field = event.get_field(scope, "cpu_id")
> + cpu_id = field.get_uint64()
> + if ctf.field_error():
> + print("ERROR: Missing cpu_id info for {}".format(event.get_name()))
> + else:
> + cpu_usage[cpu_id].append( (int(ts), event_name) )
> + nbEvents += 1
> +
> + # Next Event
> + ret = ctf_it.next()
> + if ret < 0:
> + break
> + event = ctf_it.read_event()
> +
> +
> +# Outputting
> +table = []
> +output = open("events_per_cpu.txt", "wt")
> +output.write("(timestamp, event)\n")
> +
> +for cpu in range(len(cpu_usage)):
> + # Setting table
> + event_str = str(100.0 * len(cpu_usage[cpu]) / nbEvents) + '000'
> + # % is printed with 2 decimals
> + table.append([cpu, len(cpu_usage[cpu]), event_str[0:event_str.find('.') + 3] + ' %'])
> +
> + # Writing to file
> + output.write("\n\n\n----------------------\n")
> + output.write("CPU {}\n\n".format(cpu))
> + for event in cpu_usage[cpu]:
> + output.write(str(event) + '\n')
> +
> +# Printing table
> +table.insert(0, ["CPU ID", "EVENT COUNT", "TRACE EVENT %"])
> +pprint(table)
> +print("Total event count: {}".format(nbEvents))
> +print("Total trace time: {} ns".format(ts - start_time))
> +
> +output.close()
> diff --git a/bindings/python/examples/example-api-test.py b/bindings/python/examples/example-api-test.py
> new file mode 100644
> index 0000000..0cfc3ed
> --- /dev/null
> +++ b/bindings/python/examples/example-api-test.py
> @@ -0,0 +1,59 @@
> +# This example uses the babeltrace python module
> +# to partially test the api.
> +
> +import sys
> +from babeltrace import *
> +
> +# Check for path arg:
> +if len(sys.argv) < 2:
> + raise TypeError("Usage: python example-api-test.py path/to/file")
> +
> +# Create context and add trace:
> +ctx = Context()
> +trace_handle = ctx.add_trace(sys.argv[1], "ctf")
> +if trace_handle is None:
> + raise IOError("Error adding trace")
> +
> +# Listing events
> +lst = ctf.get_event_decl_list(trace_handle, ctx)
> +print("--- Event list ---")
> +for item in lst:
> + print("event : {}".format(item.get_name()))
> +print("--- Done ---")
> +
> +# Iter trace
> +bp = IterPos(SEEK_BEGIN)
> +ctf_it = ctf.Iterator(ctx,bp)
> +event = ctf_it.read_event()
> +
> +while(event is not None):
> + print("TS: {}, {} : {}".format(event.get_timestamp(),
> + event.get_cycles(), event.get_name()))
> +
> + if event.get_name() == "sched_switch":
> + sco = event.get_top_level_scope(ctf.scope.EVENT_FIELDS)
> + prev_field = event.get_field(sco, "_prev_comm")
> + prev_comm = prev_field.get_char_array()
> +
> + if ctf.field_error():
> + print("ERROR: Missing prev_comm context info")
> + else:
> + print("sched_switch prev_comm: {}".format(prev_comm))
> +
> + if event.get_name() == "exit_syscall":
> + sco = event.get_top_level_scope(ctf.scope.EVENT_FIELDS)
> + ret_field = event.get_field(sco, "_ret")
> + ret_code = ret_field.get_int64()
> +
> + if ctf.field_error():
> + print("ERROR: Unable to extract ret")
> + else:
> + print("exit_syscall ret: {}".format(ret_code))
> +
> + ret = ctf_it.next()
> + if ret < 0:
> + break
> + else:
> + event = ctf_it.read_event()
> +
> +del ctf_it
> diff --git a/bindings/python/examples/histogram.py b/bindings/python/examples/histogram.py
> new file mode 100644
> index 0000000..a24c00d
> --- /dev/null
> +++ b/bindings/python/examples/histogram.py
> @@ -0,0 +1,121 @@
> +# The script checks the number of events in the trace
> +# and outputs a table and a .svg histogram for the specified
> +# range (microseconds) or the total trace if no range specified.
> +# The graph is generated using the cairoplot module.
> +
> +import sys
> +from babeltrace import *
> +from output_format_modules import cairoplot
> +from output_format_modules.pprint_table import pprint_table as pprint
> +
> +# ------------------------------------------------
> +# Output settings
> +
> +# number of intervals:
> +nbDiv = 25 # Should not be over 150
> + # for usable graph output
> +
> +# table output stream (file-like object):
> +out = sys.stdout
> +# -------------------------------------------------
> +
> +if len(sys.argv) < 2 or len(sys.argv) > 4:
> + raise TypeError("Usage: python histogram.py [ start_time [end_time] ] path/to/trace")
> +
> +ctx = Context()
> +ret = ctx.add_trace(sys.argv[len(sys.argv)-1], "ctf")
> +if ret is None:
> + raise IOError("Error adding trace")
> +
> +# Check when to start/stop graphing
> +sinceBegin = True
> +beginTime = 0.0
> +if len(sys.argv) > 2:
> + sinceBegin = False
> + beginTime = float(sys.argv[1])
> +untilEnd = True
> +if len(sys.argv) == 4:
> + untilEnd = False
> +
> +# Setting iterator
> +bp = IterPos(SEEK_BEGIN)
> +ctf_it = ctf.Iterator(ctx, bp)
> +
> +# Reading events
> +event = ctf_it.read_event()
> +start_time = event.get_timestamp()
> +time = 0
> +count = {}
> +
> +while(event is not None):
> + # Microsec.
> + time = (event.get_timestamp() - start_time)/1000.0
> +
> + # Check if in range
> + if not sinceBegin:
> + if time < beginTime:
> + # Next Event
> + ret = ctf_it.next()
> + if ret < 0:
> + break
> + event = ctf_it.read_event()
> + continue
> + if not untilEnd:
> + if time > float(sys.argv[2]):
> + break
> +
> + # Counting events per timestamp:
> + if time in count:
> + count[time] += 1
> + else:
> + count[time] = 1
> +
> + # Next Event
> + ret = ctf_it.next()
> + if ret < 0:
> + break
> + event = ctf_it.read_event()
> +
> +del ctf_it
> +
> +# Setting data for output
> +interval = (time - beginTime)/nbDiv
> +div_begin_time = beginTime
> +div_end_time = beginTime + interval
> +data = {}
> +
> +# Prefix for string sorting, considering
> +# there should not be over 150 intervals.
> +# This would work up to 9999 intervals.
> +# If needed, add zeros.
> +prefix = 0.0001
> +
> +while div_end_time <= time:
> + key = str(prefix) + '[' + str(div_begin_time) + ';' + str(div_end_time) + '['
> + for tmp in count:
> + if tmp >= div_begin_time and tmp < div_end_time:
> + if key in data:
> + data[key] += count[tmp]
> + else:
> + data[key] = count[tmp]
> + if not key in data:
> + data[key] = 0
> + div_begin_time = div_end_time
> + div_end_time += interval
> + # Prefix increment
> + prefix += 0.001
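Not a blocker, but the string-prefix trick above could be avoided by keying the bins on an integer index, which removes the need for string sorting. Untested sketch reusing the variables of this script ("bins" is a new name so it does not clash with "data"):

bins = {}
for t in count:
    if beginTime <= t <= time:
        idx = min(int((t - beginTime) / interval), nbDiv - 1)
        bins[idx] = bins.get(idx, 0) + count[t]
for idx in range(nbDiv):
    low = beginTime + idx * interval
    high = low + interval
    print("[{};{}[ {}".format(low, high, bins.get(idx, 0)))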
> +
> +table = []
> +x_labels = []
> +for key in sorted(data):
> + table.append([key[key.find('['):], data[key]])
> + x_labels.append(key[key.find('['):])
> +
> +# Table output
> +table.insert(0, ["INTERVAL (us)", "COUNT"])
> +pprint(table, 1, out)
> +
> +# Graph output
> +cairoplot.vertical_bar_plot ( 'histogram.svg', data, 50 + 150*nbDiv, 50*nbDiv,
> + border = 20, display_values = True, grid = True,
> + x_labels = x_labels, rounded_corners = True )
> diff --git a/bindings/python/examples/output_format_modules/__init__.py b/bindings/python/examples/output_format_modules/__init__.py
> new file mode 100644
> index 0000000..e69de29
> diff --git a/bindings/python/examples/output_format_modules/cairoplot.py b/bindings/python/examples/output_format_modules/cairoplot.py
> new file mode 100755
> index 0000000..a27113f
> --- /dev/null
> +++ b/bindings/python/examples/output_format_modules/cairoplot.py
> @@ -0,0 +1,2336 @@
> +#!/usr/bin/env python
> +# -*- coding: utf-8 -*-
> +
> +# CairoPlot.py
> +#
> +# Copyright (c) 2008 Rodrigo Moreira Araújo
> +#
> +# Author: Rodrigo Moreiro Araujo <alf.rodrigo at gmail.com>
> +#
> +# This program is free software; you can redistribute it and/or
> +# modify it under the terms of the GNU Lesser General Public License
> +# as published by the Free Software Foundation; either version 2 of
> +# the License, or (at your option) any later version.
> +#
> +# This program is distributed in the hope that it will be useful,
> +# but WITHOUT ANY WARRANTY; without even the implied warranty of
> +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
> +# GNU General Public License for more details.
> +#
> +# You should have received a copy of the GNU Lesser General Public
> +# License along with this program; if not, write to the Free Software
> +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
> +# USA
> +
> +#Contributor: João S. O. Bueno
> +
> +#TODO: review BarPlot Code
> +#TODO: x_label collision problem on Horizontal Bar Plot
> +#TODO: y_labels eat too much space on HBP
> +
> +
> +__version__ = 1.2
> +
> +import cairo
> +import math
> +import random
> +from series import Series, Group, Data
> +
> +HORZ = 0
> +VERT = 1
> +NORM = 2
> +
> +COLORS = {"red" : (1.0,0.0,0.0,1.0), "lime" : (0.0,1.0,0.0,1.0), "blue" : (0.0,0.0,1.0,1.0),
> + "maroon" : (0.5,0.0,0.0,1.0), "green" : (0.0,0.5,0.0,1.0), "navy" : (0.0,0.0,0.5,1.0),
> + "yellow" : (1.0,1.0,0.0,1.0), "magenta" : (1.0,0.0,1.0,1.0), "cyan" : (0.0,1.0,1.0,1.0),
> + "orange" : (1.0,0.5,0.0,1.0), "white" : (1.0,1.0,1.0,1.0), "black" : (0.0,0.0,0.0,1.0),
> + "gray" : (0.5,0.5,0.5,1.0), "light_gray" : (0.9,0.9,0.9,1.0),
> + "transparent" : (0.0,0.0,0.0,0.0)}
> +
> +THEMES = {"black_red" : [(0.0,0.0,0.0,1.0), (1.0,0.0,0.0,1.0)],
> + "red_green_blue" : [(1.0,0.0,0.0,1.0), (0.0,1.0,0.0,1.0), (0.0,0.0,1.0,1.0)],
> + "red_orange_yellow" : [(1.0,0.2,0.0,1.0), (1.0,0.7,0.0,1.0), (1.0,1.0,0.0,1.0)],
> + "yellow_orange_red" : [(1.0,1.0,0.0,1.0), (1.0,0.7,0.0,1.0), (1.0,0.2,0.0,1.0)],
> + "rainbow" : [(1.0,0.0,0.0,1.0), (1.0,0.5,0.0,1.0), (1.0,1.0,0.0,1.0), (0.0,1.0,0.0,1.0), (0.0,0.0,1.0,1.0), (0.3, 0.0, 0.5,1.0), (0.5, 0.0, 1.0, 1.0)]}
> +
> +def colors_from_theme( theme, series_length, mode = 'solid' ):
> + colors = []
> + if theme not in THEMES.keys() :
> + raise Exception("Theme not defined")
> + color_steps = THEMES[theme]
> + n_colors = len(color_steps)
> + if series_length <= n_colors:
> + colors = [color + tuple([mode]) for color in color_steps[0:n_colors]]
> + else:
> + iterations = [(series_length - n_colors)/(n_colors - 1) for i in color_steps[:-1]]
> + over_iterations = (series_length - n_colors) % (n_colors - 1)
> + for i in range(n_colors - 1):
> + if over_iterations <= 0:
> + break
> + iterations[i] += 1
> + over_iterations -= 1
> + for index,color in enumerate(color_steps[:-1]):
> + colors.append(color + tuple([mode]))
> + if iterations[index] == 0:
> + continue
> + next_color = color_steps[index+1]
> + color_step = ((next_color[0] - color[0])/(iterations[index] + 1),
> + (next_color[1] - color[1])/(iterations[index] + 1),
> + (next_color[2] - color[2])/(iterations[index] + 1),
> + (next_color[3] - color[3])/(iterations[index] + 1))
> + for i in range( iterations[index] ):
> + colors.append((color[0] + color_step[0]*(i+1),
> + color[1] + color_step[1]*(i+1),
> + color[2] + color_step[2]*(i+1),
> + color[3] + color_step[3]*(i+1),
> + mode))
> + colors.append(color_steps[-1] + tuple([mode]))
> + return colors
> +
> +
> +def other_direction(direction):
> + "explicit is better than implicit"
> + if direction == HORZ:
> + return VERT
> + else:
> + return HORZ
> +
> +#Class definition
> +
> +class Plot(object):
> + def __init__(self,
> + surface=None,
> + data=None,
> + width=640,
> + height=480,
> + background=None,
> + border = 0,
> + x_labels = None,
> + y_labels = None,
> + series_colors = None):
> + random.seed(2)
> + self.create_surface(surface, width, height)
> + self.dimensions = {}
> + self.dimensions[HORZ] = width
> + self.dimensions[VERT] = height
> + self.context = cairo.Context(self.surface)
> + self.labels={}
> + self.labels[HORZ] = x_labels
> + self.labels[VERT] = y_labels
> + self.load_series(data, x_labels, y_labels, series_colors)
> + self.font_size = 10
> + self.set_background (background)
> + self.border = border
> + self.borders = {}
> + self.line_color = (0.5, 0.5, 0.5)
> + self.line_width = 0.5
> + self.label_color = (0.0, 0.0, 0.0)
> + self.grid_color = (0.8, 0.8, 0.8)
> +
> + def create_surface(self, surface, width=None, height=None):
> + self.filename = None
> + if isinstance(surface, cairo.Surface):
> + self.surface = surface
> + return
> + if not type(surface) in (str, unicode):
> + raise TypeError("Surface should be either a Cairo surface or a filename, not %s" % surface)
> + sufix = surface.rsplit(".")[-1].lower()
> + self.filename = surface
> + if sufix == "png":
> + self.surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, width, height)
> + elif sufix == "ps":
> + self.surface = cairo.PSSurface(surface, width, height)
> + elif sufix == "pdf":
> + self.surface = cairo.PDFSurface(surface, width, height)
> + else:
> + if sufix != "svg":
> + self.filename += ".svg"
> + self.surface = cairo.SVGSurface(self.filename, width, height)
> +
> + def commit(self):
> + try:
> + self.context.show_page()
> + if self.filename and self.filename.endswith(".png"):
> + self.surface.write_to_png(self.filename)
> + else:
> + self.surface.finish()
> + except cairo.Error:
> + pass
> +
> + def load_series (self, data, x_labels=None, y_labels=None, series_colors=None):
> + self.series_labels = []
> + self.series = None
> +
> + #The pretty way
> + #if not isinstance(data, Series):
> + # # Not an instance of Series
> + # self.series = Series(data)
> + #else:
> + # self.series = data
> + #
> + #self.series_labels = self.series.get_names()
> +
> + #TODO: Remove on next version
> + # The ugly way, keeping retrocompatibility...
> + if callable(data) or type(data) is list and callable(data[0]): # Lambda or List of lambdas
> + self.series = data
> + self.series_labels = None
> + elif isinstance(data, Series): # Instance of Series
> + self.series = data
> + self.series_labels = data.get_names()
> + else: # Anything else
> + self.series = Series(data)
> + self.series_labels = self.series.get_names()
> +
> + #TODO: allow user passed series_widths
> + self.series_widths = [1.0 for group in self.series]
> +
> + #TODO: Remove on next version
> + self.process_colors( series_colors )
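For reference, the data forms kept accepted by the branches above boil down to the following (my reading of the code, untested; file names are made up):

Plot("t1.svg", data=[1, 2, 3, 5, 8])             # bare list, wrapped in a Series
Plot("t2.svg", data={"a": [1, 2], "b": [3, 4]})  # dict of lists, wrapped in a Series
Plot("t3.svg", data=Series([1, 2, 3]))           # Series instance, used as-is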
> +
> + def process_colors( self, series_colors, length = None, mode = 'solid' ):
> + #series_colors might be None, a theme name, a (theme, mode) pair, a list of color names, or a list of color tuples
> + if length is None :
> + length = len( self.series.to_list() )
> +
> + #no colors passed
> + if not series_colors:
> + #Randomize colors
> + self.series_colors = [ [random.random() for i in range(3)] + [1.0, mode] for series in range( length ) ]
> + else:
> + #Just theme pattern
> + if not hasattr( series_colors, "__iter__" ):
> + theme = series_colors
> + self.series_colors = colors_from_theme( theme.lower(), length )
> +
> + #Theme pattern and mode
> + elif not hasattr(series_colors, '__delitem__') and not hasattr( series_colors[0], "__iter__" ):
> + theme = series_colors[0]
> + mode = series_colors[1]
> + self.series_colors = colors_from_theme( theme.lower(), length, mode )
> +
> + #List
> + else:
> + self.series_colors = series_colors
> + for index, color in enumerate( self.series_colors ):
> + #element is a color name
> + if not hasattr(color, "__iter__"):
> + self.series_colors[index] = COLORS[color.lower()] + tuple([mode])
> + #element is rgb tuple instead of rgba
> + elif len( color ) == 3 :
> + self.series_colors[index] += (1.0,mode)
> + #element has 4 elements, might be rgba tuple or rgb tuple with mode
> + elif len( color ) == 4 :
> + #last element is mode
> + if not hasattr(color[3], "__iter__"):
> + self.series_colors[index] += tuple([color[3]])
> + self.series_colors[index][3] = 1.0
> + #last element is alpha
> + else:
> + self.series_colors[index] += tuple([mode])
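For reference, the series_colors forms handled above are: None (random solid colors), a theme name, a (theme, mode) pair, or an explicit list of color names / rgb tuples. A minimal, untested sketch using the VerticalBarPlot class further down (file name and data are made up):

p = VerticalBarPlot("colors_demo.svg", [[1, 2], [3, 4]], 400, 300,
                    series_colors=("yellow_orange_red", "linear"))
p.render()
p.commit()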
> +
> + def get_width(self):
> + return self.surface.get_width()
> +
> + def get_height(self):
> + return self.surface.get_height()
> +
> + def set_background(self, background):
> + if background is None:
> + self.background = (0.0,0.0,0.0,0.0)
> + elif type(background) in (cairo.LinearGradient, tuple):
> + self.background = background
> + elif not hasattr(background,"__iter__"):
> + colors = background.split(" ")
> + if len(colors) == 1 and colors[0] in COLORS:
> + self.background = COLORS[background]
> + elif len(colors) > 1:
> + self.background = cairo.LinearGradient(self.dimensions[HORZ] / 2, 0, self.dimensions[HORZ] / 2, self.dimensions[VERT])
> + for index,color in enumerate(colors):
> + self.background.add_color_stop_rgba(float(index)/(len(colors)-1),*COLORS[color])
> + else:
> + raise TypeError ("Background should be either cairo.LinearGradient or a 3/4-tuple, not %s" % type(background))
> +
> + def render_background(self):
> + if isinstance(self.background, cairo.LinearGradient):
> + self.context.set_source(self.background)
> + else:
> + self.context.set_source_rgba(*self.background)
> + self.context.rectangle(0,0, self.dimensions[HORZ], self.dimensions[VERT])
> + self.context.fill()
> +
> + def render_bounding_box(self):
> + self.context.set_source_rgba(*self.line_color)
> + self.context.set_line_width(self.line_width)
> + self.context.rectangle(self.border, self.border,
> + self.dimensions[HORZ] - 2 * self.border,
> + self.dimensions[VERT] - 2 * self.border)
> + self.context.stroke()
> +
> + def render(self):
> + pass
> +
> +class ScatterPlot( Plot ):
> + def __init__(self,
> + surface=None,
> + data=None,
> + errorx=None,
> + errory=None,
> + width=640,
> + height=480,
> + background=None,
> + border=0,
> + axis = False,
> + dash = False,
> + discrete = False,
> + dots = 0,
> + grid = False,
> + series_legend = False,
> + x_labels = None,
> + y_labels = None,
> + x_bounds = None,
> + y_bounds = None,
> + z_bounds = None,
> + x_title = None,
> + y_title = None,
> + series_colors = None,
> + circle_colors = None ):
> +
> + self.bounds = {}
> + self.bounds[HORZ] = x_bounds
> + self.bounds[VERT] = y_bounds
> + self.bounds[NORM] = z_bounds
> + self.titles = {}
> + self.titles[HORZ] = x_title
> + self.titles[VERT] = y_title
> + self.max_value = {}
> + self.axis = axis
> + self.discrete = discrete
> + self.dots = dots
> + self.grid = grid
> + self.series_legend = series_legend
> + self.variable_radius = False
> + self.x_label_angle = math.pi / 2.5
> + self.circle_colors = circle_colors
> +
> + Plot.__init__(self, surface, data, width, height, background, border, x_labels, y_labels, series_colors)
> +
> + self.dash = None
> + if dash:
> + if hasattr(dash, "keys"):
> + self.dash = [dash[key] for key in self.series_labels]
> + elif max([hasattr(item,'__delitem__') for item in data]) :
> + self.dash = dash
> + else:
> + self.dash = [dash]
> +
> + self.load_errors(errorx, errory)
> +
> + def convert_list_to_tuple(self, data):
> + #Data must be converted from lists of coordinates to a single
> + # list of tuples
> + out_data = zip(*data)
> + if len(data) == 3:
> + self.variable_radius = True
> + return out_data
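(i.e. [[1, 2, 3], [10, 20, 30]] becomes [(1, 10), (2, 20), (3, 30)], and passing a third inner list of radii switches variable_radius on.)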
> +
> + def load_series(self, data, x_labels = None, y_labels = None, series_colors=None):
> + #TODO: In cairoplot 2.0 keep only the Series instances
> +
> + # Convert Data and Group to Series
> + if isinstance(data, Data) or isinstance(data, Group):
> + data = Series(data)
> +
> + # Series
> + if isinstance(data, Series):
> + for group in data:
> + for item in group:
> + if len(item) == 3:
> + self.variable_radius = True
> +
> + #Dictionary with lists
> + if hasattr(data, "keys") :
> + if hasattr( data.values()[0][0], "__delitem__" ) :
> + for key in data.keys() :
> + data[key] = self.convert_list_to_tuple(data[key])
> + elif len(data.values()[0][0]) == 3:
> + self.variable_radius = True
> + #List
> + elif hasattr(data[0], "__delitem__") :
> + #List of lists
> + if hasattr(data[0][0], "__delitem__") :
> + for index,value in enumerate(data) :
> + data[index] = self.convert_list_to_tuple(value)
> + #List
> + elif type(data[0][0]) != type((0,0)):
> + data = self.convert_list_to_tuple(data)
> + #Three dimensional data
> + elif len(data[0][0]) == 3:
> + self.variable_radius = True
> +
> + #List with three dimensional tuples
> + elif len(data[0]) == 3:
> + self.variable_radius = True
> + Plot.load_series(self, data, x_labels, y_labels, series_colors)
> + self.calc_boundaries()
> + self.calc_labels()
> +
> + def load_errors(self, errorx, errory):
> + self.errors = None
> + if errorx is None and errory is None:
> + return
> + self.errors = {}
> + self.errors[HORZ] = None
> + self.errors[VERT] = None
> + #asymmetric errors
> + if errorx and hasattr(errorx[0], "__delitem__"):
> + self.errors[HORZ] = errorx
> + #symmetric errors
> + elif errorx:
> + self.errors[HORZ] = [errorx]
> + #asymmetric errors
> + if errory and hasattr(errory[0], "__delitem__"):
> + self.errors[VERT] = errory
> + #symmetric errors
> + elif errory:
> + self.errors[VERT] = [errory]
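For reference, errorx/errory accept either one value per point (symmetric) or a [lower, upper] pair of lists (asymmetric). Untested sketch with made-up numbers:

sym = [0.1, 0.2, 0.1, 0.3]
asym = [[0.1, 0.2, 0.1, 0.3], [0.3, 0.1, 0.2, 0.1]]
p = ScatterPlot("err.svg", data=[(0, 1), (1, 2), (2, 1), (3, 3)],
                errorx=sym, errory=asym, dots=2)
p.render()
p.commit()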
> +
> + def calc_labels(self):
> + if not self.labels[HORZ]:
> + amplitude = self.bounds[HORZ][1] - self.bounds[HORZ][0]
> + if amplitude % 10: #if horizontal labels need floating points
> + self.labels[HORZ] = ["%.2lf" % (float(self.bounds[HORZ][0] + (amplitude * i / 10.0))) for i in range(11) ]
> + else:
> + self.labels[HORZ] = ["%d" % (int(self.bounds[HORZ][0] + (amplitude * i / 10.0))) for i in range(11) ]
> + if not self.labels[VERT]:
> + amplitude = self.bounds[VERT][1] - self.bounds[VERT][0]
> + if amplitude % 10: #if vertical labels need floating points
> + self.labels[VERT] = ["%.2lf" % (float(self.bounds[VERT][0] + (amplitude * i / 10.0))) for i in range(11) ]
> + else:
> + self.labels[VERT] = ["%d" % (int(self.bounds[VERT][0] + (amplitude * i / 10.0))) for i in range(11) ]
> +
> + def calc_extents(self, direction):
> + self.context.set_font_size(self.font_size * 0.8)
> + self.max_value[direction] = max(self.context.text_extents(item)[2] for item in self.labels[direction])
> + self.borders[other_direction(direction)] = self.max_value[direction] + self.border + 20
> +
> + def calc_boundaries(self):
> + #HORZ = 0, VERT = 1, NORM = 2
> + min_data_value = [0,0,0]
> + max_data_value = [0,0,0]
> +
> + for group in self.series:
> + if type(group[0].content) in (int, float, long):
> + group = [Data((index, item.content)) for index,item in enumerate(group)]
> +
> + for point in group:
> + for index, item in enumerate(point.content):
> + if item > max_data_value[index]:
> + max_data_value[index] = item
> + elif item < min_data_value[index]:
> + min_data_value[index] = item
> +
> + if not self.bounds[HORZ]:
> + self.bounds[HORZ] = (min_data_value[HORZ], max_data_value[HORZ])
> + if not self.bounds[VERT]:
> + self.bounds[VERT] = (min_data_value[VERT], max_data_value[VERT])
> + if not self.bounds[NORM]:
> + self.bounds[NORM] = (min_data_value[NORM], max_data_value[NORM])
> +
> + def calc_all_extents(self):
> + self.calc_extents(HORZ)
> + self.calc_extents(VERT)
> +
> + self.plot_height = self.dimensions[VERT] - 2 * self.borders[VERT]
> + self.plot_width = self.dimensions[HORZ] - 2* self.borders[HORZ]
> +
> + self.plot_top = self.dimensions[VERT] - self.borders[VERT]
> +
> + def calc_steps(self):
> + #Calculates all the x, y, z and color steps
> + series_amplitude = [self.bounds[index][1] - self.bounds[index][0] for index in range(3)]
> +
> + if series_amplitude[HORZ]:
> + self.horizontal_step = float (self.plot_width) / series_amplitude[HORZ]
> + else:
> + self.horizontal_step = 0.00
> +
> + if series_amplitude[VERT]:
> + self.vertical_step = float (self.plot_height) / series_amplitude[VERT]
> + else:
> + self.vertical_step = 0.00
> +
> + if series_amplitude[NORM]:
> + if self.variable_radius:
> + self.z_step = float (self.bounds[NORM][1]) / series_amplitude[NORM]
> + if self.circle_colors:
> + self.circle_color_step = tuple([float(self.circle_colors[1][i]-self.circle_colors[0][i])/series_amplitude[NORM] for i in range(4)])
> + else:
> + self.z_step = 0.00
> + self.circle_color_step = ( 0.0, 0.0, 0.0, 0.0 )
> +
> + def get_circle_color(self, value):
> + return tuple( [self.circle_colors[0][i] + value*self.circle_color_step[i] for i in range(4)] )
> +
> + def render(self):
> + self.calc_all_extents()
> + self.calc_steps()
> + self.render_background()
> + self.render_bounding_box()
> + if self.axis:
> + self.render_axis()
> + if self.grid:
> + self.render_grid()
> + self.render_labels()
> + self.render_plot()
> + if self.errors:
> + self.render_errors()
> + if self.series_legend and self.series_labels:
> + self.render_legend()
> +
> + def render_axis(self):
> + #Draws both the axis lines and their titles
> + cr = self.context
> + cr.set_source_rgba(*self.line_color)
> + cr.move_to(self.borders[HORZ], self.dimensions[VERT] - self.borders[VERT])
> + cr.line_to(self.borders[HORZ], self.borders[VERT])
> + cr.stroke()
> +
> + cr.move_to(self.borders[HORZ], self.dimensions[VERT] - self.borders[VERT])
> + cr.line_to(self.dimensions[HORZ] - self.borders[HORZ], self.dimensions[VERT] - self.borders[VERT])
> + cr.stroke()
> +
> + cr.set_source_rgba(*self.label_color)
> + self.context.set_font_size( 1.2 * self.font_size )
> + if self.titles[HORZ]:
> + title_width,title_height = cr.text_extents(self.titles[HORZ])[2:4]
> + cr.move_to( self.dimensions[HORZ]/2 - title_width/2, self.borders[VERT] - title_height/2 )
> + cr.show_text( self.titles[HORZ] )
> +
> + if self.titles[VERT]:
> + title_width,title_height = cr.text_extents(self.titles[VERT])[2:4]
> + cr.move_to( self.dimensions[HORZ] - self.borders[HORZ] + title_height/2, self.dimensions[VERT]/2 - title_width/2)
> + cr.save()
> + cr.rotate( math.pi/2 )
> + cr.show_text( self.titles[VERT] )
> + cr.restore()
> +
> + def render_grid(self):
> + cr = self.context
> + horizontal_step = float( self.plot_height ) / ( len( self.labels[VERT] ) - 1 )
> + vertical_step = float( self.plot_width ) / ( len( self.labels[HORZ] ) - 1 )
> +
> + x = self.borders[HORZ] + vertical_step
> + y = self.plot_top - horizontal_step
> +
> + for label in self.labels[HORZ][:-1]:
> + cr.set_source_rgba(*self.grid_color)
> + cr.move_to(x, self.dimensions[VERT] - self.borders[VERT])
> + cr.line_to(x, self.borders[VERT])
> + cr.stroke()
> + x += vertical_step
> + for label in self.labels[VERT][:-1]:
> + cr.set_source_rgba(*self.grid_color)
> + cr.move_to(self.borders[HORZ], y)
> + cr.line_to(self.dimensions[HORZ] - self.borders[HORZ], y)
> + cr.stroke()
> + y -= horizontal_step
> +
> + def render_labels(self):
> + self.context.set_font_size(self.font_size * 0.8)
> + self.render_horz_labels()
> + self.render_vert_labels()
> +
> + def render_horz_labels(self):
> + cr = self.context
> + step = float( self.plot_width ) / ( len( self.labels[HORZ] ) - 1 )
> + x = self.borders[HORZ]
> + y = self.dimensions[VERT] - self.borders[VERT] + 5
> +
> + # store rotation matrix from the initial state
> + rotation_matrix = cr.get_matrix()
> + rotation_matrix.rotate(self.x_label_angle)
> +
> + cr.set_source_rgba(*self.label_color)
> +
> + for item in self.labels[HORZ]:
> + width = cr.text_extents(item)[2]
> + cr.move_to(x, y)
> + cr.save()
> + cr.set_matrix(rotation_matrix)
> + cr.show_text(item)
> + cr.restore()
> + x += step
> +
> + def render_vert_labels(self):
> + cr = self.context
> + step = ( self.plot_height ) / ( len( self.labels[VERT] ) - 1 )
> + y = self.plot_top
> + cr.set_source_rgba(*self.label_color)
> + for item in self.labels[VERT]:
> + width = cr.text_extents(item)[2]
> + cr.move_to(self.borders[HORZ] - width - 5,y)
> + cr.show_text(item)
> + y -= step
> +
> + def render_legend(self):
> + cr = self.context
> + cr.set_font_size(self.font_size)
> + cr.set_line_width(self.line_width)
> +
> + widest_word = max(self.series_labels, key = lambda item: self.context.text_extents(item)[2])
> + tallest_word = max(self.series_labels, key = lambda item: self.context.text_extents(item)[3])
> + max_width = self.context.text_extents(widest_word)[2]
> + max_height = self.context.text_extents(tallest_word)[3] * 1.1
> +
> + color_box_height = max_height / 2
> + color_box_width = color_box_height * 2
> +
> + #Draw a bounding box
> + bounding_box_width = max_width + color_box_width + 15
> + bounding_box_height = (len(self.series_labels)+0.5) * max_height
> + cr.set_source_rgba(1,1,1)
> + cr.rectangle(self.dimensions[HORZ] - self.borders[HORZ] - bounding_box_width, self.borders[VERT],
> + bounding_box_width, bounding_box_height)
> + cr.fill()
> +
> + cr.set_source_rgba(*self.line_color)
> + cr.set_line_width(self.line_width)
> + cr.rectangle(self.dimensions[HORZ] - self.borders[HORZ] - bounding_box_width, self.borders[VERT],
> + bounding_box_width, bounding_box_height)
> + cr.stroke()
> +
> + for idx,key in enumerate(self.series_labels):
> + #Draw color box
> + cr.set_source_rgba(*self.series_colors[idx][:4])
> + cr.rectangle(self.dimensions[HORZ] - self.borders[HORZ] - max_width - color_box_width - 10,
> + self.borders[VERT] + color_box_height + (idx*max_height) ,
> + color_box_width, color_box_height)
> + cr.fill()
> +
> + cr.set_source_rgba(0, 0, 0)
> + cr.rectangle(self.dimensions[HORZ] - self.borders[HORZ] - max_width - color_box_width - 10,
> + self.borders[VERT] + color_box_height + (idx*max_height),
> + color_box_width, color_box_height)
> + cr.stroke()
> +
> + #Draw series labels
> + cr.set_source_rgba(0, 0, 0)
> + cr.move_to(self.dimensions[HORZ] - self.borders[HORZ] - max_width - 5, self.borders[VERT] + ((idx+1)*max_height))
> + cr.show_text(key)
> +
> + def render_errors(self):
> + cr = self.context
> + cr.rectangle(self.borders[HORZ], self.borders[VERT], self.plot_width, self.plot_height)
> + cr.clip()
> + radius = self.dots
> + x0 = self.borders[HORZ] - self.bounds[HORZ][0]*self.horizontal_step
> + y0 = self.borders[VERT] - self.bounds[VERT][0]*self.vertical_step
> + for index, group in enumerate(self.series):
> + cr.set_source_rgba(*self.series_colors[index][:4])
> + for number, data in enumerate(group):
> + x = x0 + self.horizontal_step * data.content[0]
> + y = self.dimensions[VERT] - y0 - self.vertical_step * data.content[1]
> + if self.errors[HORZ]:
> + cr.move_to(x, y)
> + x1 = x - self.horizontal_step * self.errors[HORZ][0][number]
> + cr.line_to(x1, y)
> + cr.line_to(x1, y - radius)
> + cr.line_to(x1, y + radius)
> + cr.stroke()
> + if self.errors[HORZ] and len(self.errors[HORZ]) == 2:
> + cr.move_to(x, y)
> + x1 = x + self.horizontal_step * self.errors[HORZ][1][number]
> + cr.line_to(x1, y)
> + cr.line_to(x1, y - radius)
> + cr.line_to(x1, y + radius)
> + cr.stroke()
> + if self.errors[VERT]:
> + cr.move_to(x, y)
> + y1 = y + self.vertical_step * self.errors[VERT][0][number]
> + cr.line_to(x, y1)
> + cr.line_to(x - radius, y1)
> + cr.line_to(x + radius, y1)
> + cr.stroke()
> + if self.errors[VERT] and len(self.errors[VERT]) == 2:
> + cr.move_to(x, y)
> + y1 = y - self.vertical_step * self.errors[VERT][1][number]
> + cr.line_to(x, y1)
> + cr.line_to(x - radius, y1)
> + cr.line_to(x + radius, y1)
> + cr.stroke()
> +
> +
> + def render_plot(self):
> + cr = self.context
> + if self.discrete:
> + cr.rectangle(self.borders[HORZ], self.borders[VERT], self.plot_width, self.plot_height)
> + cr.clip()
> + x0 = self.borders[HORZ] - self.bounds[HORZ][0]*self.horizontal_step
> + y0 = self.borders[VERT] - self.bounds[VERT][0]*self.vertical_step
> + radius = self.dots
> + for number, group in enumerate (self.series):
> + cr.set_source_rgba(*self.series_colors[number][:4])
> + for data in group :
> + if self.variable_radius:
> + radius = data.content[2]*self.z_step
> + if self.circle_colors:
> + cr.set_source_rgba( *self.get_circle_color( data.content[2]) )
> + x = x0 + self.horizontal_step*data.content[0]
> + y = y0 + self.vertical_step*data.content[1]
> + cr.arc(x, self.dimensions[VERT] - y, radius, 0, 2*math.pi)
> + cr.fill()
> + else:
> + cr.rectangle(self.borders[HORZ], self.borders[VERT], self.plot_width, self.plot_height)
> + cr.clip()
> + x0 = self.borders[HORZ] - self.bounds[HORZ][0]*self.horizontal_step
> + y0 = self.borders[VERT] - self.bounds[VERT][0]*self.vertical_step
> + radius = self.dots
> + for number, group in enumerate (self.series):
> + last_data = None
> + cr.set_source_rgba(*self.series_colors[number][:4])
> + for data in group :
> + x = x0 + self.horizontal_step*data.content[0]
> + y = y0 + self.vertical_step*data.content[1]
> + if self.dots:
> + if self.variable_radius:
> + radius = data.content[2]*self.z_step
> + cr.arc(x, self.dimensions[VERT] - y, radius, 0, 2*math.pi)
> + cr.fill()
> + if last_data :
> + old_x = x0 + self.horizontal_step*last_data.content[0]
> + old_y = y0 + self.vertical_step*last_data.content[1]
> + cr.move_to( old_x, self.dimensions[VERT] - old_y )
> + cr.line_to( x, self.dimensions[VERT] - y)
> + cr.set_line_width(self.series_widths[number])
> +
> + # Display line as dash line
> + if self.dash and self.dash[number]:
> + s = self.series_widths[number]
> + cr.set_dash([s*3, s*3], 0)
> +
> + cr.stroke()
> + cr.set_dash([])
> + last_data = data
> +
> +class DotLinePlot(ScatterPlot):
> + def __init__(self,
> + surface=None,
> + data=None,
> + width=640,
> + height=480,
> + background=None,
> + border=0,
> + axis = False,
> + dash = False,
> + dots = 0,
> + grid = False,
> + series_legend = False,
> + x_labels = None,
> + y_labels = None,
> + x_bounds = None,
> + y_bounds = None,
> + x_title = None,
> + y_title = None,
> + series_colors = None):
> +
> + ScatterPlot.__init__(self, surface, data, None, None, width, height, background, border,
> + axis, dash, False, dots, grid, series_legend, x_labels, y_labels,
> + x_bounds, y_bounds, None, x_title, y_title, series_colors, None )
> +
> +
> + def load_series(self, data, x_labels = None, y_labels = None, series_colors=None):
> + Plot.load_series(self, data, x_labels, y_labels, series_colors)
> + for group in self.series :
> + for index,data in enumerate(group):
> + group[index].content = (index, data.content)
> +
> + self.calc_boundaries()
> + self.calc_labels()
> +
> +class FunctionPlot(ScatterPlot):
> + def __init__(self,
> + surface=None,
> + data=None,
> + width=640,
> + height=480,
> + background=None,
> + border=0,
> + axis = False,
> + discrete = False,
> + dots = 0,
> + grid = False,
> + series_legend = False,
> + x_labels = None,
> + y_labels = None,
> + x_bounds = None,
> + y_bounds = None,
> + x_title = None,
> + y_title = None,
> + series_colors = None,
> + step = 1):
> +
> + self.function = data
> + self.step = step
> + self.discrete = discrete
> +
> + data, x_bounds = self.load_series_from_function( self.function, x_bounds )
> +
> + ScatterPlot.__init__(self, surface, data, None, None, width, height, background, border,
> + axis, False, discrete, dots, grid, series_legend, x_labels, y_labels,
> + x_bounds, y_bounds, None, x_title, y_title, series_colors, None )
> +
> + def load_series(self, data, x_labels = None, y_labels = None, series_colors=None):
> + Plot.load_series(self, data, x_labels, y_labels, series_colors)
> +
> + if len(self.series[0][0]) == 1:
> + for group_id, group in enumerate(self.series) :
> + for index,data in enumerate(group):
> + group[index].content = (self.bounds[HORZ][0] + self.step*index, data.content)
> +
> + self.calc_boundaries()
> + self.calc_labels()
> +
> + def load_series_from_function( self, function, x_bounds ):
> + #TODO: Add the possibility for the user to define multiple functions with different discretization parameters
> +
> + #This function converts a function, a list of functions or a dictionary
> + #of functions into its corresponding array of data
> + series = Series()
> +
> + if isinstance(function, Group) or isinstance(function, Data):
> + function = Series(function)
> +
> + # If is instance of Series
> + if isinstance(function, Series):
> + # The Series' own range overrides any x_bounds passed in
> + x_bounds = (function.range[0],function.range[-1])
> +
> + #if no bounds are provided
> + if x_bounds is None:
> + x_bounds = (0,10)
> +
> +
> + #TODO: Finish the dict translation
> + if hasattr(function, "keys"): #dictionary:
> + for key in function.keys():
> + group = Group(name=key)
> + #data[ key ] = []
> + i = x_bounds[0]
> + while i <= x_bounds[1] :
> + group.add_data(function[ key ](i))
> + #data[ key ].append( function[ key ](i) )
> + i += self.step
> + series.add_group(group)
> +
> + elif hasattr(function, "__delitem__"): #list of functions
> + for index,f in enumerate( function ) :
> + group = Group()
> + #data.append( [] )
> + i = x_bounds[0]
> + while i <= x_bounds[1] :
> + group.add_data(f(i))
> + #data[ index ].append( f(i) )
> + i += self.step
> + series.add_group(group)
> +
> + elif isinstance(function, Series): # instance of Series
> + series = function
> +
> + else: #function
> + group = Group()
> + i = x_bounds[0]
> + while i <= x_bounds[1] :
> + group.add_data(function(i))
> + i += self.step
> + series.add_group(group)
> +
> +
> + return series, x_bounds
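For reference, the function forms accepted here (untested sketches, file names made up):

FunctionPlot("f1.svg", data=lambda x: x ** 2, step=0.5)           # single function, default bounds (0, 10)
FunctionPlot("f2.svg", data=[lambda x: x, lambda x: 2 * x],
             x_bounds=(0, 6), step=0.1)                           # list of functions
FunctionPlot("f3.svg", data={"lin": lambda x: x, "quad": lambda x: x * x},
             x_bounds=(0, 5), step=1)                             # dict: keys become group names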
> +
> + def calc_labels(self):
> + if not self.labels[HORZ]:
> + self.labels[HORZ] = []
> + i = self.bounds[HORZ][0]
> + while i<=self.bounds[HORZ][1]:
> + self.labels[HORZ].append(str(i))
> + i += float(self.bounds[HORZ][1] - self.bounds[HORZ][0])/10
> + ScatterPlot.calc_labels(self)
> +
> + def render_plot(self):
> + if not self.discrete:
> + ScatterPlot.render_plot(self)
> + else:
> + last = None
> + cr = self.context
> + for number, group in enumerate (self.series):
> + cr.set_source_rgba(*self.series_colors[number][:4])
> + x0 = self.borders[HORZ] - self.bounds[HORZ][0]*self.horizontal_step
> + y0 = self.borders[VERT] - self.bounds[VERT][0]*self.vertical_step
> + for data in group:
> + x = x0 + self.horizontal_step * data.content[0]
> + y = y0 + self.vertical_step * data.content[1]
> + cr.move_to(x, self.dimensions[VERT] - y)
> + cr.line_to(x, self.plot_top)
> + cr.set_line_width(self.series_widths[number])
> + cr.stroke()
> + if self.dots:
> + cr.new_path()
> + cr.arc(x, self.dimensions[VERT] - y, 3, 0, 2.1 * math.pi)
> + cr.close_path()
> + cr.fill()
> +
> +class BarPlot(Plot):
> + def __init__(self,
> + surface = None,
> + data = None,
> + width = 640,
> + height = 480,
> + background = "white light_gray",
> + border = 0,
> + display_values = False,
> + grid = False,
> + rounded_corners = False,
> + stack = False,
> + three_dimension = False,
> + x_labels = None,
> + y_labels = None,
> + x_bounds = None,
> + y_bounds = None,
> + series_colors = None,
> + main_dir = None):
> +
> + self.bounds = {}
> + self.bounds[HORZ] = x_bounds
> + self.bounds[VERT] = y_bounds
> + self.display_values = display_values
> + self.grid = grid
> + self.rounded_corners = rounded_corners
> + self.stack = stack
> + self.three_dimension = three_dimension
> + self.x_label_angle = math.pi / 2.5
> + self.main_dir = main_dir
> + self.max_value = {}
> + self.plot_dimensions = {}
> + self.steps = {}
> + self.value_label_color = (0.5,0.5,0.5,1.0)
> +
> + Plot.__init__(self, surface, data, width, height, background, border, x_labels, y_labels, series_colors)
> +
> + def load_series(self, data, x_labels = None, y_labels = None, series_colors = None):
> + Plot.load_series(self, data, x_labels, y_labels, series_colors)
> + self.calc_boundaries()
> +
> + def process_colors(self, series_colors):
> + #Data for a BarPlot might be a List or a List of Lists.
> + #In the first case, colors must be generated for all bars;
> + #in the second, for each of the inner lists.
> +
> + #TODO: Didn't get it...
> + #if hasattr(self.data[0], '__getitem__'):
> + # length = max(len(series) for series in self.data)
> + #else:
> + # length = len( self.data )
> +
> + length = max(len(group) for group in self.series)
> +
> + Plot.process_colors( self, series_colors, length, 'linear')
> +
> + def calc_boundaries(self):
> + if not self.bounds[self.main_dir]:
> + if self.stack:
> + max_data_value = max(sum(group.to_list()) for group in self.series)
> + else:
> + max_data_value = max(max(group.to_list()) for group in self.series)
> + self.bounds[self.main_dir] = (0, max_data_value)
> + if not self.bounds[other_direction(self.main_dir)]:
> + self.bounds[other_direction(self.main_dir)] = (0, len(self.series))
> +
> + def calc_extents(self, direction):
> + self.max_value[direction] = 0
> + if self.labels[direction]:
> + widest_word = max(self.labels[direction], key = lambda item: self.context.text_extents(item)[2])
> + self.max_value[direction] = self.context.text_extents(widest_word)[3 - direction]
> + self.borders[other_direction(direction)] = (2-direction)*self.max_value[direction] + self.border + direction*(5)
> + else:
> + self.borders[other_direction(direction)] = self.border
> +
> + def calc_horz_extents(self):
> + self.calc_extents(HORZ)
> +
> + def calc_vert_extents(self):
> + self.calc_extents(VERT)
> +
> + def calc_all_extents(self):
> + self.calc_horz_extents()
> + self.calc_vert_extents()
> + other_dir = other_direction(self.main_dir)
> + self.value_label = 0
> + if self.display_values:
> + if self.stack:
> + self.value_label = self.context.text_extents(str(max(sum(group.to_list()) for group in self.series)))[2 + self.main_dir]
> + else:
> + self.value_label = self.context.text_extents(str(max(max(group.to_list()) for group in self.series)))[2 + self.main_dir]
> + if self.labels[self.main_dir]:
> + self.plot_dimensions[self.main_dir] = self.dimensions[self.main_dir] - 2*self.borders[self.main_dir] - self.value_label
> + else:
> + self.plot_dimensions[self.main_dir] = self.dimensions[self.main_dir] - self.borders[self.main_dir] - 1.2*self.border - self.value_label
> + self.plot_dimensions[other_dir] = self.dimensions[other_dir] - self.borders[other_dir] - self.border
> + self.plot_top = self.dimensions[VERT] - self.borders[VERT]
> +
> + def calc_steps(self):
> + other_dir = other_direction(self.main_dir)
> + self.series_amplitude = self.bounds[self.main_dir][1] - self.bounds[self.main_dir][0]
> + if self.series_amplitude:
> + self.steps[self.main_dir] = float(self.plot_dimensions[self.main_dir])/self.series_amplitude
> + else:
> + self.steps[self.main_dir] = 0.00
> + series_length = len(self.series)
> + self.steps[other_dir] = float(self.plot_dimensions[other_dir])/(series_length + 0.1*(series_length + 1))
> + self.space = 0.1*self.steps[other_dir]
> +
> + def render(self):
> + self.calc_all_extents()
> + self.calc_steps()
> + self.render_background()
> + self.render_bounding_box()
> + if self.grid:
> + self.render_grid()
> + if self.three_dimension:
> + self.render_ground()
> + if self.display_values:
> + self.render_values()
> + self.render_labels()
> + self.render_plot()
> + if self.series_labels:
> + self.render_legend()
> +
> + def draw_3d_rectangle_front(self, x0, y0, x1, y1, shift):
> + self.context.rectangle(x0-shift, y0+shift, x1-x0, y1-y0)
> +
> + def draw_3d_rectangle_side(self, x0, y0, x1, y1, shift):
> + self.context.move_to(x1-shift,y0+shift)
> + self.context.line_to(x1, y0)
> + self.context.line_to(x1, y1)
> + self.context.line_to(x1-shift, y1+shift)
> + self.context.line_to(x1-shift, y0+shift)
> + self.context.close_path()
> +
> + def draw_3d_rectangle_top(self, x0, y0, x1, y1, shift):
> + self.context.move_to(x0-shift,y0+shift)
> + self.context.line_to(x0, y0)
> + self.context.line_to(x1, y0)
> + self.context.line_to(x1-shift, y0+shift)
> + self.context.line_to(x0-shift, y0+shift)
> + self.context.close_path()
> +
> + def draw_round_rectangle(self, x0, y0, x1, y1):
> + self.context.arc(x0+5, y0+5, 5, -math.pi, -math.pi/2)
> + self.context.line_to(x1-5, y0)
> + self.context.arc(x1-5, y0+5, 5, -math.pi/2, 0)
> + self.context.line_to(x1, y1-5)
> + self.context.arc(x1-5, y1-5, 5, 0, math.pi/2)
> + self.context.line_to(x0+5, y1)
> + self.context.arc(x0+5, y1-5, 5, math.pi/2, math.pi)
> + self.context.line_to(x0, y0+5)
> + self.context.close_path()
> +
> + def render_ground(self):
> + self.draw_3d_rectangle_front(self.borders[HORZ], self.dimensions[VERT] - self.borders[VERT],
> + self.dimensions[HORZ] - self.borders[HORZ], self.dimensions[VERT] - self.borders[VERT] + 5, 10)
> + self.context.fill()
> +
> + self.draw_3d_rectangle_side (self.borders[HORZ], self.dimensions[VERT] - self.borders[VERT],
> + self.dimensions[HORZ] - self.borders[HORZ], self.dimensions[VERT] - self.borders[VERT] + 5, 10)
> + self.context.fill()
> +
> + self.draw_3d_rectangle_top (self.borders[HORZ], self.dimensions[VERT] - self.borders[VERT],
> + self.dimensions[HORZ] - self.borders[HORZ], self.dimensions[VERT] - self.borders[VERT] + 5, 10)
> + self.context.fill()
> +
> + def render_labels(self):
> + self.context.set_font_size(self.font_size * 0.8)
> + if self.labels[HORZ]:
> + self.render_horz_labels()
> + if self.labels[VERT]:
> + self.render_vert_labels()
> +
> + def render_legend(self):
> + cr = self.context
> + cr.set_font_size(self.font_size)
> + cr.set_line_width(self.line_width)
> +
> + widest_word = max(self.series_labels, key = lambda item: self.context.text_extents(item)[2])
> + tallest_word = max(self.series_labels, key = lambda item: self.context.text_extents(item)[3])
> + max_width = self.context.text_extents(widest_word)[2]
> + max_height = self.context.text_extents(tallest_word)[3] * 1.1 + 5
> +
> + color_box_height = max_height / 2
> + color_box_width = color_box_height * 2
> +
> + #Draw a bounding box
> + bounding_box_width = max_width + color_box_width + 15
> + bounding_box_height = (len(self.series_labels)+0.5) * max_height
> + cr.set_source_rgba(1,1,1)
> + cr.rectangle(self.dimensions[HORZ] - self.border - bounding_box_width, self.border,
> + bounding_box_width, bounding_box_height)
> + cr.fill()
> +
> + cr.set_source_rgba(*self.line_color)
> + cr.set_line_width(self.line_width)
> + cr.rectangle(self.dimensions[HORZ] - self.border - bounding_box_width, self.border,
> + bounding_box_width, bounding_box_height)
> + cr.stroke()
> +
> + for idx,key in enumerate(self.series_labels):
> + #Draw color box
> + cr.set_source_rgba(*self.series_colors[idx][:4])
> + cr.rectangle(self.dimensions[HORZ] - self.border - max_width - color_box_width - 10,
> + self.border + color_box_height + (idx*max_height) ,
> + color_box_width, color_box_height)
> + cr.fill()
> +
> + cr.set_source_rgba(0, 0, 0)
> + cr.rectangle(self.dimensions[HORZ] - self.border - max_width - color_box_width - 10,
> + self.border + color_box_height + (idx*max_height),
> + color_box_width, color_box_height)
> + cr.stroke()
> +
> + #Draw series labels
> + cr.set_source_rgba(0, 0, 0)
> + cr.move_to(self.dimensions[HORZ] - self.border - max_width - 5, self.border + ((idx+1)*max_height))
> + cr.show_text(key)
> +
> +
> +class HorizontalBarPlot(BarPlot):
> + def __init__(self,
> + surface = None,
> + data = None,
> + width = 640,
> + height = 480,
> + background = "white light_gray",
> + border = 0,
> + display_values = False,
> + grid = False,
> + rounded_corners = False,
> + stack = False,
> + three_dimension = False,
> + series_labels = None,
> + x_labels = None,
> + y_labels = None,
> + x_bounds = None,
> + y_bounds = None,
> + series_colors = None):
> +
> + BarPlot.__init__(self, surface, data, width, height, background, border,
> + display_values, grid, rounded_corners, stack, three_dimension,
> + x_labels, y_labels, x_bounds, y_bounds, series_colors, HORZ)
> + self.series_labels = series_labels
> +
> + def calc_vert_extents(self):
> + self.calc_extents(VERT)
> + if self.labels[HORZ] and not self.labels[VERT]:
> + self.borders[HORZ] += 10
> +
> + def draw_rectangle_bottom(self, x0, y0, x1, y1):
> + self.context.arc(x0+5, y1-5, 5, math.pi/2, math.pi)
> + self.context.line_to(x0, y0+5)
> + self.context.arc(x0+5, y0+5, 5, -math.pi, -math.pi/2)
> + self.context.line_to(x1, y0)
> + self.context.line_to(x1, y1)
> + self.context.line_to(x0+5, y1)
> + self.context.close_path()
> +
> + def draw_rectangle_top(self, x0, y0, x1, y1):
> + self.context.arc(x1-5, y0+5, 5, -math.pi/2, 0)
> + self.context.line_to(x1, y1-5)
> + self.context.arc(x1-5, y1-5, 5, 0, math.pi/2)
> + self.context.line_to(x0, y1)
> + self.context.line_to(x0, y0)
> + self.context.line_to(x1, y0)
> + self.context.close_path()
> +
> + def draw_rectangle(self, index, length, x0, y0, x1, y1):
> + if length == 1:
> + BarPlot.draw_rectangle(self, x0, y0, x1, y1)
> + elif index == 0:
> + self.draw_rectangle_bottom(x0, y0, x1, y1)
> + elif index == length-1:
> + self.draw_rectangle_top(x0, y0, x1, y1)
> + else:
> + self.context.rectangle(x0, y0, x1-x0, y1-y0)
> +
> + #TODO: Review BarPlot.render_grid code
> + def render_grid(self):
> + self.context.set_source_rgba(0.8, 0.8, 0.8)
> + if self.labels[HORZ]:
> + self.context.set_font_size(self.font_size * 0.8)
> + step = (self.dimensions[HORZ] - 2*self.borders[HORZ] - self.value_label)/(len(self.labels[HORZ])-1)
> + x = self.borders[HORZ]
> + next_x = 0
> + for item in self.labels[HORZ]:
> + width = self.context.text_extents(item)[2]
> + if x - width/2 > next_x and x - width/2 > self.border:
> + self.context.move_to(x, self.border)
> + self.context.line_to(x, self.dimensions[VERT] - self.borders[VERT])
> + self.context.stroke()
> + next_x = x + width/2
> + x += step
> + else:
> + lines = 11
> + horizontal_step = float(self.plot_dimensions[HORZ])/(lines-1)
> + x = self.borders[HORZ]
> + for y in xrange(0, lines):
> + self.context.move_to(x, self.border)
> + self.context.line_to(x, self.dimensions[VERT] - self.borders[VERT])
> + self.context.stroke()
> + x += horizontal_step
> +
> + def render_horz_labels(self):
> + step = (self.dimensions[HORZ] - 2*self.borders[HORZ])/(len(self.labels[HORZ])-1)
> + x = self.borders[HORZ]
> + next_x = 0
> +
> + for item in self.labels[HORZ]:
> + self.context.set_source_rgba(*self.label_color)
> + width = self.context.text_extents(item)[2]
> + if x - width/2 > next_x and x - width/2 > self.border:
> + self.context.move_to(x - width/2, self.dimensions[VERT] - self.borders[VERT] + self.max_value[HORZ] + 3)
> + self.context.show_text(item)
> + next_x = x + width/2
> + x += step
> +
> + def render_vert_labels(self):
> + series_length = len(self.labels[VERT])
> + step = (self.plot_dimensions[VERT] - (series_length + 1)*self.space)/(len(self.labels[VERT]))
> + y = self.border + step/2 + self.space
> +
> + for item in self.labels[VERT]:
> + self.context.set_source_rgba(*self.label_color)
> + width, height = self.context.text_extents(item)[2:4]
> + self.context.move_to(self.borders[HORZ] - width - 5, y + height/2)
> + self.context.show_text(item)
> + y += step + self.space
> + self.labels[VERT].reverse()
> +
> + def render_values(self):
> + self.context.set_source_rgba(*self.value_label_color)
> + self.context.set_font_size(self.font_size * 0.8)
> + if self.stack:
> + for i,group in enumerate(self.series):
> + value = sum(group.to_list())
> + height = self.context.text_extents(str(value))[3]
> + x = self.borders[HORZ] + value*self.steps[HORZ] + 2
> + y = self.borders[VERT] + (i+0.5)*self.steps[VERT] + (i+1)*self.space + height/2
> + self.context.move_to(x, y)
> + self.context.show_text(str(value))
> + else:
> + for i,group in enumerate(self.series):
> + inner_step = self.steps[VERT]/len(group)
> + y0 = self.border + i*self.steps[VERT] + (i+1)*self.space
> + for number,data in enumerate(group):
> + height = self.context.text_extents(str(data.content))[3]
> + self.context.move_to(self.borders[HORZ] + data.content*self.steps[HORZ] + 2, y0 + 0.5*inner_step + height/2, )
> + self.context.show_text(str(data.content))
> + y0 += inner_step
> +
> + def render_plot(self):
> + if self.stack:
> + for i,group in enumerate(self.series):
> + x0 = self.borders[HORZ]
> + y0 = self.borders[VERT] + i*self.steps[VERT] + (i+1)*self.space
> + for number,data in enumerate(group):
> + if self.series_colors[number][4] in ('radial','linear') :
> + linear = cairo.LinearGradient( data.content*self.steps[HORZ]/2, y0, data.content*self.steps[HORZ]/2, y0 + self.steps[VERT] )
> + color = self.series_colors[number]
> + linear.add_color_stop_rgba(0.0, 3.5*color[0]/5.0, 3.5*color[1]/5.0, 3.5*color[2]/5.0,1.0)
> + linear.add_color_stop_rgba(1.0, *color[:4])
> + self.context.set_source(linear)
> + elif self.series_colors[number][4] == 'solid':
> + self.context.set_source_rgba(*self.series_colors[number][:4])
> + if self.rounded_corners:
> + self.draw_rectangle(number, len(group), x0, y0, x0+data.content*self.steps[HORZ], y0+self.steps[VERT])
> + self.context.fill()
> + else:
> + self.context.rectangle(x0, y0, data.content*self.steps[HORZ], self.steps[VERT])
> + self.context.fill()
> + x0 += data.content*self.steps[HORZ]
> + else:
> + for i,group in enumerate(self.series):
> + inner_step = self.steps[VERT]/len(group)
> + x0 = self.borders[HORZ]
> + y0 = self.border + i*self.steps[VERT] + (i+1)*self.space
> + for number,data in enumerate(group):
> + linear = cairo.LinearGradient(data.content*self.steps[HORZ]/2, y0, data.content*self.steps[HORZ]/2, y0 + inner_step)
> + color = self.series_colors[number]
> + linear.add_color_stop_rgba(0.0, 3.5*color[0]/5.0, 3.5*color[1]/5.0, 3.5*color[2]/5.0,1.0)
> + linear.add_color_stop_rgba(1.0, *color[:4])
> + self.context.set_source(linear)
> + if self.rounded_corners and data.content != 0:
> + BarPlot.draw_round_rectangle(self,x0, y0, x0 + data.content*self.steps[HORZ], y0 + inner_step)
> + self.context.fill()
> + else:
> + self.context.rectangle(x0, y0, data.content*self.steps[HORZ], inner_step)
> + self.context.fill()
> + y0 += inner_step
> +
> +class VerticalBarPlot(BarPlot):
> + def __init__(self,
> + surface = None,
> + data = None,
> + width = 640,
> + height = 480,
> + background = "white light_gray",
> + border = 0,
> + display_values = False,
> + grid = False,
> + rounded_corners = False,
> + stack = False,
> + three_dimension = False,
> + series_labels = None,
> + x_labels = None,
> + y_labels = None,
> + x_bounds = None,
> + y_bounds = None,
> + series_colors = None):
> +
> + BarPlot.__init__(self, surface, data, width, height, background, border,
> + display_values, grid, rounded_corners, stack, three_dimension,
> + x_labels, y_labels, x_bounds, y_bounds, series_colors, VERT)
> + self.series_labels = series_labels
> +
> + def calc_vert_extents(self):
> + self.calc_extents(VERT)
> + if self.labels[VERT] and not self.labels[HORZ]:
> + self.borders[VERT] += 10
> +
> + def draw_rectangle_bottom(self, x0, y0, x1, y1):
> + self.context.move_to(x1,y1)
> + self.context.arc(x1-5, y1-5, 5, 0, math.pi/2)
> + self.context.line_to(x0+5, y1)
> + self.context.arc(x0+5, y1-5, 5, math.pi/2, math.pi)
> + self.context.line_to(x0, y0)
> + self.context.line_to(x1, y0)
> + self.context.line_to(x1, y1)
> + self.context.close_path()
> +
> + def draw_rectangle_top(self, x0, y0, x1, y1):
> + self.context.arc(x0+5, y0+5, 5, -math.pi, -math.pi/2)
> + self.context.line_to(x1-5, y0)
> + self.context.arc(x1-5, y0+5, 5, -math.pi/2, 0)
> + self.context.line_to(x1, y1)
> + self.context.line_to(x0, y1)
> + self.context.line_to(x0, y0)
> + self.context.close_path()
> +
> + def draw_rectangle(self, index, length, x0, y0, x1, y1):
> + if length == 1:
> + BarPlot.draw_rectangle(self, x0, y0, x1, y1)
> + elif index == 0:
> + self.draw_rectangle_bottom(x0, y0, x1, y1)
> + elif index == length-1:
> + self.draw_rectangle_top(x0, y0, x1, y1)
> + else:
> + self.context.rectangle(x0, y0, x1-x0, y1-y0)
> +
> + def render_grid(self):
> + self.context.set_source_rgba(0.8, 0.8, 0.8)
> + if self.labels[VERT]:
> + lines = len(self.labels[VERT])
> + vertical_step = float(self.plot_dimensions[self.main_dir])/(lines-1)
> + y = self.borders[VERT] + self.value_label
> + else:
> + lines = 11
> + vertical_step = float(self.plot_dimensions[self.main_dir])/(lines-1)
> + y = 1.2*self.border + self.value_label
> + for x in xrange(0, lines):
> + self.context.move_to(self.borders[HORZ], y)
> + self.context.line_to(self.dimensions[HORZ] - self.border, y)
> + self.context.stroke()
> + y += vertical_step
> +
> + def render_ground(self):
> + self.draw_3d_rectangle_front(self.borders[HORZ], self.dimensions[VERT] - self.borders[VERT],
> + self.dimensions[HORZ] - self.borders[HORZ], self.dimensions[VERT] - self.borders[VERT] + 5, 10)
> + self.context.fill()
> +
> + self.draw_3d_rectangle_side (self.borders[HORZ], self.dimensions[VERT] - self.borders[VERT],
> + self.dimensions[HORZ] - self.borders[HORZ], self.dimensions[VERT] - self.borders[VERT] + 5, 10)
> + self.context.fill()
> +
> + self.draw_3d_rectangle_top (self.borders[HORZ], self.dimensions[VERT] - self.borders[VERT],
> + self.dimensions[HORZ] - self.borders[HORZ], self.dimensions[VERT] - self.borders[VERT] + 5, 10)
> + self.context.fill()
> +
> + def render_horz_labels(self):
> + series_length = len(self.labels[HORZ])
> + step = float (self.plot_dimensions[HORZ] - (series_length + 1)*self.space)/len(self.labels[HORZ])
> + x = self.borders[HORZ] + step/2 + self.space
> + next_x = 0
> +
> + for item in self.labels[HORZ]:
> + self.context.set_source_rgba(*self.label_color)
> + width = self.context.text_extents(item)[2]
> + if x - width/2 > next_x and x - width/2 > self.borders[HORZ]:
> + self.context.move_to(x - width/2, self.dimensions[VERT] - self.borders[VERT] + self.max_value[HORZ] + 3)
> + self.context.show_text(item)
> + next_x = x + width/2
> + x += step + self.space
> +
> + def render_vert_labels(self):
> + self.context.set_source_rgba(*self.label_color)
> + y = self.borders[VERT] + self.value_label
> + step = (self.dimensions[VERT] - 2*self.borders[VERT] - self.value_label)/(len(self.labels[VERT]) - 1)
> + self.labels[VERT].reverse()
> + for item in self.labels[VERT]:
> + width, height = self.context.text_extents(item)[2:4]
> + self.context.move_to(self.borders[HORZ] - width - 5, y + height/2)
> + self.context.show_text(item)
> + y += step
> + self.labels[VERT].reverse()
> +
> + def render_values(self):
> + self.context.set_source_rgba(*self.value_label_color)
> + self.context.set_font_size(self.font_size * 0.8)
> + if self.stack:
> + for i,group in enumerate(self.series):
> + value = sum(group.to_list())
> + width = self.context.text_extents(str(value))[2]
> + x = self.borders[HORZ] + (i+0.5)*self.steps[HORZ] + (i+1)*self.space - width/2
> + y = value*self.steps[VERT] + 2
> + self.context.move_to(x, self.plot_top-y)
> + self.context.show_text(str(value))
> + else:
> + for i,group in enumerate(self.series):
> + inner_step = self.steps[HORZ]/len(group)
> + x0 = self.borders[HORZ] + i*self.steps[HORZ] + (i+1)*self.space
> + for number,data in enumerate(group):
> + width = self.context.text_extents(str(data.content))[2]
> + self.context.move_to(x0 + 0.5*inner_step - width/2, self.plot_top - data.content*self.steps[VERT] - 2)
> + self.context.show_text(str(data.content))
> + x0 += inner_step
> +
> + def render_plot(self):
> + if self.stack:
> + for i,group in enumerate(self.series):
> + x0 = self.borders[HORZ] + i*self.steps[HORZ] + (i+1)*self.space
> + y0 = 0
> + for number,data in enumerate(group):
> + if self.series_colors[number][4] in ('linear','radial'):
> + linear = cairo.LinearGradient( x0, data.content*self.steps[VERT]/2, x0 + self.steps[HORZ], data.content*self.steps[VERT]/2 )
> + color = self.series_colors[number]
> + linear.add_color_stop_rgba(0.0, 3.5*color[0]/5.0, 3.5*color[1]/5.0, 3.5*color[2]/5.0,1.0)
> + linear.add_color_stop_rgba(1.0, *color[:4])
> + self.context.set_source(linear)
> + elif self.series_colors[number][4] == 'solid':
> + self.context.set_source_rgba(*self.series_colors[number][:4])
> + if self.rounded_corners:
> + self.draw_rectangle(number, len(group), x0, self.plot_top - y0 - data.content*self.steps[VERT], x0 + self.steps[HORZ], self.plot_top - y0)
> + self.context.fill()
> + else:
> + self.context.rectangle(x0, self.plot_top - y0 - data.content*self.steps[VERT], self.steps[HORZ], data.content*self.steps[VERT])
> + self.context.fill()
> + y0 += data.content*self.steps[VERT]
> + else:
> + for i,group in enumerate(self.series):
> + inner_step = self.steps[HORZ]/len(group)
> + y0 = self.borders[VERT]
> + x0 = self.borders[HORZ] + i*self.steps[HORZ] + (i+1)*self.space
> + for number,data in enumerate(group):
> + if self.series_colors[number][4] == 'linear':
> + linear = cairo.LinearGradient( x0, data.content*self.steps[VERT]/2, x0 + inner_step, data.content*self.steps[VERT]/2 )
> + color = self.series_colors[number]
> + linear.add_color_stop_rgba(0.0, 3.5*color[0]/5.0, 3.5*color[1]/5.0, 3.5*color[2]/5.0,1.0)
> + linear.add_color_stop_rgba(1.0, *color[:4])
> + self.context.set_source(linear)
> + elif self.series_colors[number][4] == 'solid':
> + self.context.set_source_rgba(*self.series_colors[number][:4])
> + if self.rounded_corners and data.content != 0:
> + BarPlot.draw_round_rectangle(self, x0, self.plot_top - data.content*self.steps[VERT], x0+inner_step, self.plot_top)
> + self.context.fill()
> + elif self.three_dimension:
> + self.draw_3d_rectangle_front(x0, self.plot_top - data.content*self.steps[VERT], x0+inner_step, self.plot_top, 5)
> + self.context.fill()
> + self.draw_3d_rectangle_side(x0, self.plot_top - data.content*self.steps[VERT], x0+inner_step, self.plot_top, 5)
> + self.context.fill()
> + self.draw_3d_rectangle_top(x0, self.plot_top - data.content*self.steps[VERT], x0+inner_step, self.plot_top, 5)
> + self.context.fill()
> + else:
> + self.context.rectangle(x0, self.plot_top - data.content*self.steps[VERT], inner_step, data.content*self.steps[VERT])
> + self.context.fill()
> +
> + x0 += inner_step
> +
> +class StreamChart(VerticalBarPlot):
> + def __init__(self,
> + surface = None,
> + data = None,
> + width = 640,
> + height = 480,
> + background = "white light_gray",
> + border = 0,
> + grid = False,
> + series_legend = None,
> + x_labels = None,
> + x_bounds = None,
> + y_bounds = None,
> + series_colors = None):
> +
> + VerticalBarPlot.__init__(self, surface, data, width, height, background, border,
> + False, grid, False, True, False,
> + None, x_labels, None, x_bounds, y_bounds, series_colors)
> +
> + def calc_steps(self):
> + other_dir = other_direction(self.main_dir)
> + self.series_amplitude = self.bounds[self.main_dir][1] - self.bounds[self.main_dir][0]
> + if self.series_amplitude:
> + self.steps[self.main_dir] = float(self.plot_dimensions[self.main_dir])/self.series_amplitude
> + else:
> + self.steps[self.main_dir] = 0.00
> + series_length = len(self.data)
> + self.steps[other_dir] = float(self.plot_dimensions[other_dir])/series_length
> +
> + def render_legend(self):
> + pass
> +
> + def ground(self, index):
> + sum_values = sum(self.data[index])
> + return -0.5*sum_values
> +
> + def calc_angles(self):
> + middle = self.plot_top - self.plot_dimensions[VERT]/2.0
> + self.angles = [tuple([0.0 for x in range(len(self.data)+1)])]
> + for x_index in range(1, len(self.data)-1):
> + t = []
> + x0 = self.borders[HORZ] + (0.5 + x_index - 1)*self.steps[HORZ]
> + x2 = self.borders[HORZ] + (0.5 + x_index + 1)*self.steps[HORZ]
> + y0 = middle - self.ground(x_index-1)*self.steps[VERT]
> + y2 = middle - self.ground(x_index+1)*self.steps[VERT]
> + t.append(math.atan(float(y0-y2)/(x0-x2)))
> + for data_index in range(len(self.data[x_index])):
> + x0 = self.borders[HORZ] + (0.5 + x_index - 1)*self.steps[HORZ]
> + x2 = self.borders[HORZ] + (0.5 + x_index + 1)*self.steps[HORZ]
> + y0 = middle - self.ground(x_index-1)*self.steps[VERT] - self.data[x_index-1][data_index]*self.steps[VERT]
> + y2 = middle - self.ground(x_index+1)*self.steps[VERT] - self.data[x_index+1][data_index]*self.steps[VERT]
> +
> + for i in range(0,data_index):
> + y0 -= self.data[x_index-1][i]*self.steps[VERT]
> + y2 -= self.data[x_index+1][i]*self.steps[VERT]
> +
> + if data_index == len(self.data[0])-1 and False:
> + self.context.set_source_rgba(0.0,0.0,0.0,0.3)
> + self.context.move_to(x0,y0)
> + self.context.line_to(x2,y2)
> + self.context.stroke()
> + self.context.arc(x0,y0,2,0,2*math.pi)
> + self.context.fill()
> + t.append(math.atan(float(y0-y2)/(x0-x2)))
> + self.angles.append(tuple(t))
> + self.angles.append(tuple([0.0 for x in range(len(self.data)+1)]))
> +
> + def render_plot(self):
> + self.calc_angles()
> + middle = self.plot_top - self.plot_dimensions[VERT]/2.0
> + p = 0.4*self.steps[HORZ]
> + for data_index in range(len(self.data[0])-1,-1,-1):
> + self.context.set_source_rgba(*self.series_colors[data_index][:4])
> +
> + #draw the upper line
> + for x_index in range(len(self.data)-1) :
> + x1 = self.borders[HORZ] + (0.5 + x_index)*self.steps[HORZ]
> + y1 = middle - self.ground(x_index)*self.steps[VERT] - self.data[x_index][data_index]*self.steps[VERT]
> + x2 = self.borders[HORZ] + (0.5 + x_index + 1)*self.steps[HORZ]
> + y2 = middle - self.ground(x_index + 1)*self.steps[VERT] - self.data[x_index + 1][data_index]*self.steps[VERT]
> +
> + for i in range(0,data_index):
> + y1 -= self.data[x_index][i]*self.steps[VERT]
> + y2 -= self.data[x_index+1][i]*self.steps[VERT]
> +
> + if x_index == 0:
> + self.context.move_to(x1,y1)
> +
> + ang1 = self.angles[x_index][data_index+1]
> + ang2 = self.angles[x_index+1][data_index+1] + math.pi
> + self.context.curve_to(x1+p*math.cos(ang1),y1+p*math.sin(ang1),
> + x2+p*math.cos(ang2),y2+p*math.sin(ang2),
> + x2,y2)
> +
> + for x_index in range(len(self.data)-1,0,-1) :
> + x1 = self.borders[HORZ] + (0.5 + x_index)*self.steps[HORZ]
> + y1 = middle - self.ground(x_index)*self.steps[VERT]
> + x2 = self.borders[HORZ] + (0.5 + x_index - 1)*self.steps[HORZ]
> + y2 = middle - self.ground(x_index - 1)*self.steps[VERT]
> +
> + for i in range(0,data_index):
> + y1 -= self.data[x_index][i]*self.steps[VERT]
> + y2 -= self.data[x_index-1][i]*self.steps[VERT]
> +
> + if x_index == len(self.data)-1:
> + self.context.line_to(x1,y1+2)
> +
> +                #reverse the angles by pi radians to take the turn back
> + ang1 = self.angles[x_index][data_index] + math.pi
> + ang2 = self.angles[x_index-1][data_index]
> + self.context.curve_to(x1+p*math.cos(ang1),y1+p*math.sin(ang1),
> + x2+p*math.cos(ang2),y2+p*math.sin(ang2),
> + x2,y2+2)
> +
> + self.context.close_path()
> + self.context.fill()
> +
> + if False:
> + self.context.move_to(self.borders[HORZ] + 0.5*self.steps[HORZ], middle)
> + for x_index in range(len(self.data)-1) :
> + x1 = self.borders[HORZ] + (0.5 + x_index)*self.steps[HORZ]
> + y1 = middle - self.ground(x_index)*self.steps[VERT] - self.data[x_index][data_index]*self.steps[VERT]
> + x2 = self.borders[HORZ] + (0.5 + x_index + 1)*self.steps[HORZ]
> + y2 = middle - self.ground(x_index + 1)*self.steps[VERT] - self.data[x_index + 1][data_index]*self.steps[VERT]
> +
> + for i in range(0,data_index):
> + y1 -= self.data[x_index][i]*self.steps[VERT]
> + y2 -= self.data[x_index+1][i]*self.steps[VERT]
> +
> + ang1 = self.angles[x_index][data_index+1]
> + ang2 = self.angles[x_index+1][data_index+1] + math.pi
> + self.context.set_source_rgba(1.0,0.0,0.0)
> + self.context.arc(x1+p*math.cos(ang1),y1+p*math.sin(ang1),2,0,2*math.pi)
> + self.context.fill()
> + self.context.set_source_rgba(0.0,0.0,0.0)
> + self.context.arc(x2+p*math.cos(ang2),y2+p*math.sin(ang2),2,0,2*math.pi)
> + self.context.fill()
> + '''self.context.set_source_rgba(0.0,0.0,0.0,0.3)
> + self.context.arc(x2,y2,2,0,2*math.pi)
> + self.context.fill()'''
> + self.context.move_to(x1,y1)
> + self.context.line_to(x1+p*math.cos(ang1),y1+p*math.sin(ang1))
> + self.context.stroke()
> + self.context.move_to(x2,y2)
> + self.context.line_to(x2+p*math.cos(ang2),y2+p*math.sin(ang2))
> + self.context.stroke()
> + if False:
> + for x_index in range(len(self.data)-1,0,-1) :
> + x1 = self.borders[HORZ] + (0.5 + x_index)*self.steps[HORZ]
> + y1 = middle - self.ground(x_index)*self.steps[VERT]
> + x2 = self.borders[HORZ] + (0.5 + x_index - 1)*self.steps[HORZ]
> + y2 = middle - self.ground(x_index - 1)*self.steps[VERT]
> +
> + for i in range(0,data_index):
> + y1 -= self.data[x_index][i]*self.steps[VERT]
> + y2 -= self.data[x_index-1][i]*self.steps[VERT]
> +
> +                    #reverse the angles by pi radians to take the turn back
> + ang1 = self.angles[x_index][data_index] + math.pi
> + ang2 = self.angles[x_index-1][data_index]
> + self.context.set_source_rgba(0.0,1.0,0.0)
> + self.context.arc(x1+p*math.cos(ang1),y1+p*math.sin(ang1),2,0,2*math.pi)
> + self.context.fill()
> + self.context.set_source_rgba(0.0,0.0,1.0)
> + self.context.arc(x2+p*math.cos(ang2),y2+p*math.sin(ang2),2,0,2*math.pi)
> + self.context.fill()
> + '''self.context.set_source_rgba(0.0,0.0,0.0,0.3)
> + self.context.arc(x2,y2,2,0,2*math.pi)
> + self.context.fill()'''
> + self.context.move_to(x1,y1)
> + self.context.line_to(x1+p*math.cos(ang1),y1+p*math.sin(ang1))
> + self.context.stroke()
> + self.context.move_to(x2,y2)
> + self.context.line_to(x2+p*math.cos(ang2),y2+p*math.sin(ang2))
> + self.context.stroke()
> + #break
> +
> + #self.context.arc(self.dimensions[HORZ]/2, self.dimensions[VERT]/2,50,0,3*math.pi/2)
> + #self.context.fill()
> +
> +
> +class PiePlot(Plot):
> + #TODO: Check the old cairoplot, graphs aren't matching
> + def __init__ (self,
> + surface = None,
> + data = None,
> + width = 640,
> + height = 480,
> + background = "white light_gray",
> + gradient = False,
> + shadow = False,
> + colors = None):
> +
> + Plot.__init__( self, surface, data, width, height, background, series_colors = colors )
> + self.center = (self.dimensions[HORZ]/2, self.dimensions[VERT]/2)
> + self.total = sum( self.series.to_list() )
> + self.radius = min(self.dimensions[HORZ]/3,self.dimensions[VERT]/3)
> + self.gradient = gradient
> + self.shadow = shadow
> +
> + def sort_function(x,y):
> + return x.content - y.content
> +
> + def load_series(self, data, x_labels=None, y_labels=None, series_colors=None):
> + Plot.load_series(self, data, x_labels, y_labels, series_colors)
> + # Already done inside series
> + #self.data = sorted(self.data)
> +
> + def draw_piece(self, angle, next_angle):
> + self.context.move_to(self.center[0],self.center[1])
> + self.context.line_to(self.center[0] + self.radius*math.cos(angle), self.center[1] + self.radius*math.sin(angle))
> + self.context.arc(self.center[0], self.center[1], self.radius, angle, next_angle)
> + self.context.line_to(self.center[0], self.center[1])
> + self.context.close_path()
> +
> + def render(self):
> + self.render_background()
> + self.render_bounding_box()
> + if self.shadow:
> + self.render_shadow()
> + self.render_plot()
> + self.render_series_labels()
> +
> + def render_shadow(self):
> + horizontal_shift = 3
> + vertical_shift = 3
> + self.context.set_source_rgba(0, 0, 0, 0.5)
> + self.context.arc(self.center[0] + horizontal_shift, self.center[1] + vertical_shift, self.radius, 0, 2*math.pi)
> + self.context.fill()
> +
> + def render_series_labels(self):
> + angle = 0
> + next_angle = 0
> + x0,y0 = self.center
> + cr = self.context
> + for number,key in enumerate(self.series_labels):
> + # self.data[number] should be just a number
> + data = sum(self.series[number].to_list())
> +
> + next_angle = angle + 2.0*math.pi*data/self.total
> + cr.set_source_rgba(*self.series_colors[number][:4])
> + w = cr.text_extents(key)[2]
> + if (angle + next_angle)/2 < math.pi/2 or (angle + next_angle)/2 > 3*math.pi/2:
> + cr.move_to(x0 + (self.radius+10)*math.cos((angle+next_angle)/2), y0 + (self.radius+10)*math.sin((angle+next_angle)/2) )
> + else:
> + cr.move_to(x0 + (self.radius+10)*math.cos((angle+next_angle)/2) - w, y0 + (self.radius+10)*math.sin((angle+next_angle)/2) )
> + cr.show_text(key)
> + angle = next_angle
> +
> + def render_plot(self):
> + angle = 0
> + next_angle = 0
> + x0,y0 = self.center
> + cr = self.context
> + for number,group in enumerate(self.series):
> + # Group should be just a number
> + data = sum(group.to_list())
> + next_angle = angle + 2.0*math.pi*data/self.total
> + if self.gradient or self.series_colors[number][4] in ('linear','radial'):
> + gradient_color = cairo.RadialGradient(self.center[0], self.center[1], 0, self.center[0], self.center[1], self.radius)
> + gradient_color.add_color_stop_rgba(0.3, *self.series_colors[number][:4])
> + gradient_color.add_color_stop_rgba(1, self.series_colors[number][0]*0.7,
> + self.series_colors[number][1]*0.7,
> + self.series_colors[number][2]*0.7,
> + self.series_colors[number][3])
> + cr.set_source(gradient_color)
> + else:
> + cr.set_source_rgba(*self.series_colors[number][:4])
> +
> + self.draw_piece(angle, next_angle)
> + cr.fill()
> +
> + cr.set_source_rgba(1.0, 1.0, 1.0)
> + self.draw_piece(angle, next_angle)
> + cr.stroke()
> +
> + angle = next_angle
> +
> +class DonutPlot(PiePlot):
> + def __init__ (self,
> + surface = None,
> + data = None,
> + width = 640,
> + height = 480,
> + background = "white light_gray",
> + gradient = False,
> + shadow = False,
> + colors = None,
> + inner_radius=-1):
> +
> + Plot.__init__( self, surface, data, width, height, background, series_colors = colors )
> +
> + self.center = ( self.dimensions[HORZ]/2, self.dimensions[VERT]/2 )
> + self.total = sum( self.series.to_list() )
> + self.radius = min( self.dimensions[HORZ]/3,self.dimensions[VERT]/3 )
> + self.inner_radius = inner_radius*self.radius
> +
> + if inner_radius == -1:
> + self.inner_radius = self.radius/3
> +
> + self.gradient = gradient
> + self.shadow = shadow
> +
> + def draw_piece(self, angle, next_angle):
> + self.context.move_to(self.center[0] + (self.inner_radius)*math.cos(angle), self.center[1] + (self.inner_radius)*math.sin(angle))
> + self.context.line_to(self.center[0] + self.radius*math.cos(angle), self.center[1] + self.radius*math.sin(angle))
> + self.context.arc(self.center[0], self.center[1], self.radius, angle, next_angle)
> + self.context.line_to(self.center[0] + (self.inner_radius)*math.cos(next_angle), self.center[1] + (self.inner_radius)*math.sin(next_angle))
> + self.context.arc_negative(self.center[0], self.center[1], self.inner_radius, next_angle, angle)
> + self.context.close_path()
> +
> + def render_shadow(self):
> + horizontal_shift = 3
> + vertical_shift = 3
> + self.context.set_source_rgba(0, 0, 0, 0.5)
> + self.context.arc(self.center[0] + horizontal_shift, self.center[1] + vertical_shift, self.inner_radius, 0, 2*math.pi)
> + self.context.arc_negative(self.center[0] + horizontal_shift, self.center[1] + vertical_shift, self.radius, 0, -2*math.pi)
> + self.context.fill()
> +
> +class GanttChart (Plot) :
> + def __init__(self,
> + surface = None,
> + data = None,
> + width = 640,
> + height = 480,
> + x_labels = None,
> + y_labels = None,
> + colors = None):
> + self.bounds = {}
> + self.max_value = {}
> + Plot.__init__(self, surface, data, width, height, x_labels = x_labels, y_labels = y_labels, series_colors = colors)
> +
> + def load_series(self, data, x_labels=None, y_labels=None, series_colors=None):
> + Plot.load_series(self, data, x_labels, y_labels, series_colors)
> + self.calc_boundaries()
> +
> + def calc_boundaries(self):
> + self.bounds[HORZ] = (0,len(self.series))
> + end_pos = max(self.series.to_list())
> +
> + #for group in self.series:
> + # if hasattr(item, "__delitem__"):
> + # for sub_item in item:
> + # end_pos = max(sub_item)
> + # else:
> + # end_pos = max(item)
> + self.bounds[VERT] = (0,end_pos)
> +
> + def calc_extents(self, direction):
> + self.max_value[direction] = 0
> + if self.labels[direction]:
> + self.max_value[direction] = max(self.context.text_extents(item)[2] for item in self.labels[direction])
> + else:
> + self.max_value[direction] = self.context.text_extents( str(self.bounds[direction][1] + 1) )[2]
> +
> + def calc_horz_extents(self):
> + self.calc_extents(HORZ)
> + self.borders[HORZ] = 100 + self.max_value[HORZ]
> +
> + def calc_vert_extents(self):
> + self.calc_extents(VERT)
> + self.borders[VERT] = self.dimensions[VERT]/(self.bounds[HORZ][1] + 1)
> +
> + def calc_steps(self):
> + self.horizontal_step = (self.dimensions[HORZ] - self.borders[HORZ])/(len(self.labels[VERT]))
> + self.vertical_step = self.borders[VERT]
> +
> + def render(self):
> + self.calc_horz_extents()
> + self.calc_vert_extents()
> + self.calc_steps()
> + self.render_background()
> +
> + self.render_labels()
> + self.render_grid()
> + self.render_plot()
> +
> + def render_background(self):
> + cr = self.context
> + cr.set_source_rgba(255,255,255)
> + cr.rectangle(0,0,self.dimensions[HORZ], self.dimensions[VERT])
> + cr.fill()
> + for number,group in enumerate(self.series):
> + linear = cairo.LinearGradient(self.dimensions[HORZ]/2, self.borders[VERT] + number*self.vertical_step,
> + self.dimensions[HORZ]/2, self.borders[VERT] + (number+1)*self.vertical_step)
> + linear.add_color_stop_rgba(0,1.0,1.0,1.0,1.0)
> + linear.add_color_stop_rgba(1.0,0.9,0.9,0.9,1.0)
> + cr.set_source(linear)
> + cr.rectangle(0,self.borders[VERT] + number*self.vertical_step,self.dimensions[HORZ],self.vertical_step)
> + cr.fill()
> +
> + def render_grid(self):
> + cr = self.context
> + cr.set_source_rgba(0.7, 0.7, 0.7)
> + cr.set_dash((1,0,0,0,0,0,1))
> + cr.set_line_width(0.5)
> + for number,label in enumerate(self.labels[VERT]):
> + h = cr.text_extents(label)[3]
> + cr.move_to(self.borders[HORZ] + number*self.horizontal_step, self.vertical_step/2 + h)
> + cr.line_to(self.borders[HORZ] + number*self.horizontal_step, self.dimensions[VERT])
> + cr.stroke()
> +
> + def render_labels(self):
> + self.context.set_font_size(0.02 * self.dimensions[HORZ])
> +
> + self.render_horz_labels()
> + self.render_vert_labels()
> +
> + def render_horz_labels(self):
> + cr = self.context
> + labels = self.labels[HORZ]
> + if not labels:
> + labels = [str(i) for i in range(1, self.bounds[HORZ][1] + 1) ]
> + for number,label in enumerate(labels):
> + if label != None:
> + cr.set_source_rgba(0.5, 0.5, 0.5)
> + w,h = cr.text_extents(label)[2], cr.text_extents(label)[3]
> + cr.move_to(40,self.borders[VERT] + number*self.vertical_step + self.vertical_step/2 + h/2)
> + cr.show_text(label)
> +
> + def render_vert_labels(self):
> + cr = self.context
> + labels = self.labels[VERT]
> + if not labels:
> + labels = [str(i) for i in range(1, self.bounds[VERT][1] + 1) ]
> + for number,label in enumerate(labels):
> + w,h = cr.text_extents(label)[2], cr.text_extents(label)[3]
> + cr.move_to(self.borders[HORZ] + number*self.horizontal_step - w/2, self.vertical_step/2)
> + cr.show_text(label)
> +
> + def render_rectangle(self, x0, y0, x1, y1, color):
> + self.draw_shadow(x0, y0, x1, y1)
> + self.draw_rectangle(x0, y0, x1, y1, color)
> +
> + def draw_rectangular_shadow(self, gradient, x0, y0, w, h):
> + self.context.set_source(gradient)
> + self.context.rectangle(x0,y0,w,h)
> + self.context.fill()
> +
> + def draw_circular_shadow(self, x, y, radius, ang_start, ang_end, mult, shadow):
> + gradient = cairo.RadialGradient(x, y, 0, x, y, 2*radius)
> + gradient.add_color_stop_rgba(0, 0, 0, 0, shadow)
> + gradient.add_color_stop_rgba(1, 0, 0, 0, 0)
> + self.context.set_source(gradient)
> + self.context.move_to(x,y)
> + self.context.line_to(x + mult[0]*radius,y + mult[1]*radius)
> + self.context.arc(x, y, 8, ang_start, ang_end)
> + self.context.line_to(x,y)
> + self.context.close_path()
> + self.context.fill()
> +
> + def draw_rectangle(self, x0, y0, x1, y1, color):
> + cr = self.context
> + middle = (x0+x1)/2
> + linear = cairo.LinearGradient(middle,y0,middle,y1)
> + linear.add_color_stop_rgba(0,3.5*color[0]/5.0, 3.5*color[1]/5.0, 3.5*color[2]/5.0,1.0)
> + linear.add_color_stop_rgba(1,*color[:4])
> + cr.set_source(linear)
> +
> + cr.arc(x0+5, y0+5, 5, 0, 2*math.pi)
> + cr.arc(x1-5, y0+5, 5, 0, 2*math.pi)
> + cr.arc(x0+5, y1-5, 5, 0, 2*math.pi)
> + cr.arc(x1-5, y1-5, 5, 0, 2*math.pi)
> + cr.rectangle(x0+5,y0,x1-x0-10,y1-y0)
> + cr.rectangle(x0,y0+5,x1-x0,y1-y0-10)
> + cr.fill()
> +
> + def draw_shadow(self, x0, y0, x1, y1):
> + shadow = 0.4
> + h_mid = (x0+x1)/2
> + v_mid = (y0+y1)/2
> + h_linear_1 = cairo.LinearGradient(h_mid,y0-4,h_mid,y0+4)
> + h_linear_2 = cairo.LinearGradient(h_mid,y1-4,h_mid,y1+4)
> + v_linear_1 = cairo.LinearGradient(x0-4,v_mid,x0+4,v_mid)
> + v_linear_2 = cairo.LinearGradient(x1-4,v_mid,x1+4,v_mid)
> +
> + h_linear_1.add_color_stop_rgba( 0, 0, 0, 0, 0)
> + h_linear_1.add_color_stop_rgba( 1, 0, 0, 0, shadow)
> + h_linear_2.add_color_stop_rgba( 0, 0, 0, 0, shadow)
> + h_linear_2.add_color_stop_rgba( 1, 0, 0, 0, 0)
> + v_linear_1.add_color_stop_rgba( 0, 0, 0, 0, 0)
> + v_linear_1.add_color_stop_rgba( 1, 0, 0, 0, shadow)
> + v_linear_2.add_color_stop_rgba( 0, 0, 0, 0, shadow)
> + v_linear_2.add_color_stop_rgba( 1, 0, 0, 0, 0)
> +
> + self.draw_rectangular_shadow(h_linear_1,x0+4,y0-4,x1-x0-8,8)
> + self.draw_rectangular_shadow(h_linear_2,x0+4,y1-4,x1-x0-8,8)
> + self.draw_rectangular_shadow(v_linear_1,x0-4,y0+4,8,y1-y0-8)
> + self.draw_rectangular_shadow(v_linear_2,x1-4,y0+4,8,y1-y0-8)
> +
> + self.draw_circular_shadow(x0+4, y0+4, 4, math.pi, 3*math.pi/2, (-1,0), shadow)
> + self.draw_circular_shadow(x1-4, y0+4, 4, 3*math.pi/2, 2*math.pi, (0,-1), shadow)
> + self.draw_circular_shadow(x0+4, y1-4, 4, math.pi/2, math.pi, (0,1), shadow)
> + self.draw_circular_shadow(x1-4, y1-4, 4, 0, math.pi/2, (1,0), shadow)
> +
> + def render_plot(self):
> + for index,group in enumerate(self.series):
> + for data in group:
> + self.render_rectangle(self.borders[HORZ] + data.content[0]*self.horizontal_step,
> + self.borders[VERT] + index*self.vertical_step + self.vertical_step/4.0,
> + self.borders[HORZ] + data.content[1]*self.horizontal_step,
> + self.borders[VERT] + index*self.vertical_step + 3.0*self.vertical_step/4.0,
> + self.series_colors[index])
> +
> +# Function definition
> +
> +def scatter_plot(name,
> + data = None,
> + errorx = None,
> + errory = None,
> + width = 640,
> + height = 480,
> + background = "white light_gray",
> + border = 0,
> + axis = False,
> + dash = False,
> + discrete = False,
> + dots = False,
> + grid = False,
> + series_legend = False,
> + x_labels = None,
> + y_labels = None,
> + x_bounds = None,
> + y_bounds = None,
> + z_bounds = None,
> + x_title = None,
> + y_title = None,
> + series_colors = None,
> + circle_colors = None):
> +
> + '''
> + - Function to plot scatter data.
> +
> + - Parameters
> +
> +    data - The values to be plotted may be passed in two basic forms:
> +                 a list of points: [(0,0), (0,1), (0,2)] or [(0,0,1), (0,1,4), (0,2,1)]
> +                 lists of coordinates: [ [0,0,0] , [0,1,2] ] or [ [0,0,0] , [0,1,2] , [1,4,1] ]
> +           Notice that these kinds of data can be grouped in order to form more complex data
> +           using lists of lists or dictionaries;
> + series_colors - Define color values for each of the series
> + circle_colors - Define a lower and an upper bound for the circle colors for variable radius
> + (3 dimensions) series
> + '''
> +
> + plot = ScatterPlot( name, data, errorx, errory, width, height, background, border,
> + axis, dash, discrete, dots, grid, series_legend, x_labels, y_labels,
> + x_bounds, y_bounds, z_bounds, x_title, y_title, series_colors, circle_colors )
> + plot.render()
> + plot.commit()
> +
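> +# A hedged usage sketch (not part of the original CairoPlot code): the
> +# scatter_plot docstring above has no "Example of use" section, so this
> +# helper shows one possible call. The file name and data points below are
> +# made-up illustrations, not values taken from the patch.
> +def _scatter_plot_sketch():
> +    data = [(0, 0), (1, 2), (2, 1), (3, 4), (4, 3)]
> +    scatter_plot('scatter_sketch', data = data, width = 400, height = 300,
> +                 border = 20, axis = True, grid = True, dots = True)
> +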
> +def dot_line_plot(name,
> + data,
> + width,
> + height,
> + background = "white light_gray",
> + border = 0,
> + axis = False,
> + dash = False,
> + dots = False,
> + grid = False,
> + series_legend = False,
> + x_labels = None,
> + y_labels = None,
> + x_bounds = None,
> + y_bounds = None,
> + x_title = None,
> + y_title = None,
> + series_colors = None):
> + '''
> + - Function to plot graphics using dots and lines.
> +
> + dot_line_plot (name, data, width, height, background = "white light_gray", border = 0, axis = False, grid = False, x_labels = None, y_labels = None, x_bounds = None, y_bounds = None)
> +
> + - Parameters
> +
> +    name - Name of the desired output file, no need to input the .svg as it will be added at runtime;
> + data - The list, list of lists or dictionary holding the data to be plotted;
> + width, height - Dimensions of the output image;
> + background - A 3 element tuple representing the rgb color expected for the background or a new cairo linear gradient.
> + If left None, a gray to white gradient will be generated;
> + border - Distance in pixels of a square border into which the graphics will be drawn;
> + axis - Whether or not the axis are to be drawn;
> + dash - Boolean or a list or a dictionary of booleans indicating whether or not the associated series should be drawn in dashed mode;
> + dots - Whether or not dots should be drawn on each point;
> +    grid - Whether or not the grid is to be drawn;
> + series_legend - Whether or not the legend is to be drawn;
> + x_labels, y_labels - lists of strings containing the horizontal and vertical labels for the axis;
> + x_bounds, y_bounds - tuples containing the lower and upper value bounds for the data to be plotted;
> +    x_title - Title to be plotted over the x axis.
> +    y_title - Title to be plotted over the y axis.
> +
> + - Examples of use
> +
> + data = [0, 1, 3, 8, 9, 0, 10, 10, 2, 1]
> + CairoPlot.dot_line_plot('teste', data, 400, 300)
> +
> + data = { "john" : [10, 10, 10, 10, 30], "mary" : [0, 0, 3, 5, 15], "philip" : [13, 32, 11, 25, 2] }
> + x_labels = ["jan/2008", "feb/2008", "mar/2008", "apr/2008", "may/2008" ]
> + CairoPlot.dot_line_plot( 'test', data, 400, 300, axis = True, grid = True,
> + series_legend = True, x_labels = x_labels )
> + '''
> + plot = DotLinePlot( name, data, width, height, background, border,
> + axis, dash, dots, grid, series_legend, x_labels, y_labels,
> + x_bounds, y_bounds, x_title, y_title, series_colors )
> + plot.render()
> + plot.commit()
> +
> +def function_plot(name,
> + data,
> + width,
> + height,
> + background = "white light_gray",
> + border = 0,
> + axis = True,
> + dots = False,
> + discrete = False,
> + grid = False,
> + series_legend = False,
> + x_labels = None,
> + y_labels = None,
> + x_bounds = None,
> + y_bounds = None,
> + x_title = None,
> + y_title = None,
> + series_colors = None,
> + step = 1):
> +
> + '''
> + - Function to plot functions.
> +
> + function_plot(name, data, width, height, background = "white light_gray", border = 0, axis = True, grid = False, dots = False, x_labels = None, y_labels = None, x_bounds = None, y_bounds = None, step = 1, discrete = False)
> +
> + - Parameters
> +
> +    name - Name of the desired output file, no need to input the .svg as it will be added at runtime;
> + data - The list, list of lists or dictionary holding the data to be plotted;
> + width, height - Dimensions of the output image;
> + background - A 3 element tuple representing the rgb color expected for the background or a new cairo linear gradient.
> + If left None, a gray to white gradient will be generated;
> + border - Distance in pixels of a square border into which the graphics will be drawn;
> + axis - Whether or not the axis are to be drawn;
> +    grid - Whether or not the grid is to be drawn;
> + dots - Whether or not dots should be shown at each point;
> + x_labels, y_labels - lists of strings containing the horizontal and vertical labels for the axis;
> + x_bounds, y_bounds - tuples containing the lower and upper value bounds for the data to be plotted;
> + step - the horizontal distance from one point to the other. The smaller, the smoother the curve will be;
> + discrete - whether or not the function should be plotted in discrete format.
> +
> + - Example of use
> +
> + data = lambda x : x**2
> + CairoPlot.function_plot('function4', data, 400, 300, grid = True, x_bounds=(-10,10), step = 0.1)
> + '''
> +
> + plot = FunctionPlot( name, data, width, height, background, border,
> + axis, discrete, dots, grid, series_legend, x_labels, y_labels,
> + x_bounds, y_bounds, x_title, y_title, series_colors, step )
> + plot.render()
> + plot.commit()
> +
> +def pie_plot( name, data, width, height, background = "white light_gray", gradient = False, shadow = False, colors = None ):
> +
> + '''
> + - Function to plot pie graphics.
> +
> + pie_plot(name, data, width, height, background = "white light_gray", gradient = False, colors = None)
> +
> + - Parameters
> +
> +    name - Name of the desired output file, no need to input the .svg as it will be added at runtime;
> + data - The list, list of lists or dictionary holding the data to be plotted;
> + width, height - Dimensions of the output image;
> + background - A 3 element tuple representing the rgb color expected for the background or a new cairo linear gradient.
> + If left None, a gray to white gradient will be generated;
> + gradient - Whether or not the pie color will be painted with a gradient;
> + shadow - Whether or not there will be a shadow behind the pie;
> + colors - List of slices colors.
> +
> + - Example of use
> +
> + teste_data = {"john" : 123, "mary" : 489, "philip" : 890 , "suzy" : 235}
> + CairoPlot.pie_plot("pie_teste", teste_data, 500, 500)
> + '''
> +
> + plot = PiePlot( name, data, width, height, background, gradient, shadow, colors )
> + plot.render()
> + plot.commit()
> +
> +def donut_plot(name, data, width, height, background = "white light_gray", gradient = False, shadow = False, colors = None, inner_radius = -1):
> +
> + '''
> + - Function to plot donut graphics.
> +
> + donut_plot(name, data, width, height, background = "white light_gray", gradient = False, inner_radius = -1)
> +
> + - Parameters
> +
> +    name - Name of the desired output file, no need to input the .svg as it will be added at runtime;
> + data - The list, list of lists or dictionary holding the data to be plotted;
> + width, height - Dimensions of the output image;
> + background - A 3 element tuple representing the rgb color expected for the background or a new cairo linear gradient.
> + If left None, a gray to white gradient will be generated;
> + shadow - Whether or not there will be a shadow behind the donut;
> + gradient - Whether or not the donut color will be painted with a gradient;
> + colors - List of slices colors;
> + inner_radius - The radius of the donut's inner circle.
> +
> + - Example of use
> +
> + teste_data = {"john" : 123, "mary" : 489, "philip" : 890 , "suzy" : 235}
> + CairoPlot.donut_plot("donut_teste", teste_data, 500, 500)
> + '''
> +
> + plot = DonutPlot(name, data, width, height, background, gradient, shadow, colors, inner_radius)
> + plot.render()
> + plot.commit()
> +
> +def gantt_chart(name, pieces, width, height, x_labels, y_labels, colors):
> +
> + '''
> + - Function to generate Gantt Charts.
> +
> + gantt_chart(name, pieces, width, height, x_labels, y_labels, colors):
> +
> + - Parameters
> +
> +    name - Name of the desired output file, no need to input the .svg as it will be added at runtime;
> + pieces - A list defining the spaces to be drawn. The user must pass, for each line, the index of its start and the index of its end. If a line must have two or more spaces, they must be passed inside a list;
> + width, height - Dimensions of the output image;
> + x_labels - A list of names for each of the vertical lines;
> + y_labels - A list of names for each of the horizontal spaces;
> + colors - List containing the colors expected for each of the horizontal spaces
> +
> + - Example of use
> +
> + pieces = [ (0.5,5.5) , [(0,4),(6,8)] , (5.5,7) , (7,8)]
> + x_labels = [ 'teste01', 'teste02', 'teste03', 'teste04']
> + y_labels = [ '0001', '0002', '0003', '0004', '0005', '0006', '0007', '0008', '0009', '0010' ]
> + colors = [ (1.0, 0.0, 0.0), (1.0, 0.7, 0.0), (1.0, 1.0, 0.0), (0.0, 1.0, 0.0) ]
> + CairoPlot.gantt_chart('gantt_teste', pieces, 600, 300, x_labels, y_labels, colors)
> + '''
> +
> + plot = GanttChart(name, pieces, width, height, x_labels, y_labels, colors)
> + plot.render()
> + plot.commit()
> +
> +def vertical_bar_plot(name,
> + data,
> + width,
> + height,
> + background = "white light_gray",
> + border = 0,
> + display_values = False,
> + grid = False,
> + rounded_corners = False,
> + stack = False,
> + three_dimension = False,
> + series_labels = None,
> + x_labels = None,
> + y_labels = None,
> + x_bounds = None,
> + y_bounds = None,
> + colors = None):
> + #TODO: Fix docstring for vertical_bar_plot
> + '''
> + - Function to generate vertical Bar Plot Charts.
> +
> +    vertical_bar_plot(name, data, width, height, background, border, grid, rounded_corners, three_dimension,
> + x_labels, y_labels, x_bounds, y_bounds, colors):
> +
> + - Parameters
> +
> + name - Name of the desired output file, no need to input the .svg as it will be added at runtime;
> + data - The list, list of lists or dictionary holding the data to be plotted;
> + width, height - Dimensions of the output image;
> + background - A 3 element tuple representing the rgb color expected for the background or a new cairo linear gradient.
> + If left None, a gray to white gradient will be generated;
> + border - Distance in pixels of a square border into which the graphics will be drawn;
> +    grid - Whether or not the grid is to be drawn;
> + rounded_corners - Whether or not the bars should have rounded corners;
> + three_dimension - Whether or not the bars should be drawn in pseudo 3D;
> + x_labels, y_labels - lists of strings containing the horizontal and vertical labels for the axis;
> + x_bounds, y_bounds - tuples containing the lower and upper value bounds for the data to be plotted;
> + colors - List containing the colors expected for each of the bars.
> +
> + - Example of use
> +
> + data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
> + CairoPlot.vertical_bar_plot ('bar2', data, 400, 300, border = 20, grid = True, rounded_corners = False)
> + '''
> +
> + plot = VerticalBarPlot(name, data, width, height, background, border,
> + display_values, grid, rounded_corners, stack, three_dimension,
> + series_labels, x_labels, y_labels, x_bounds, y_bounds, colors)
> + plot.render()
> + plot.commit()
> +
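> +# A hedged usage sketch (not from the original docstring, which is still a
> +# TODO for the stack/series_labels parameters): it stacks two made-up value
> +# series per bar and labels them. All names and numbers below are
> +# illustrative assumptions only.
> +def _vertical_bar_plot_stack_sketch():
> +    data = [[10, 20], [15, 5], [8, 12]]
> +    vertical_bar_plot('vbar_stack_sketch', data, 400, 300, border = 20,
> +                      grid = True, stack = True,
> +                      series_labels = ['user', 'system'],
> +                      x_labels = ['t0', 't1', 't2'])
> +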
> +def horizontal_bar_plot(name,
> + data,
> + width,
> + height,
> + background = "white light_gray",
> + border = 0,
> + display_values = False,
> + grid = False,
> + rounded_corners = False,
> + stack = False,
> + three_dimension = False,
> + series_labels = None,
> + x_labels = None,
> + y_labels = None,
> + x_bounds = None,
> + y_bounds = None,
> + colors = None):
> +
> + #TODO: Fix docstring for horizontal_bar_plot
> + '''
> + - Function to generate Horizontal Bar Plot Charts.
> +
> +    horizontal_bar_plot(name, data, width, height, background, border, grid, rounded_corners, three_dimension,
> + x_labels, y_labels, x_bounds, y_bounds, colors):
> +
> + - Parameters
> +
> + name - Name of the desired output file, no need to input the .svg as it will be added at runtime;
> + data - The list, list of lists or dictionary holding the data to be plotted;
> + width, height - Dimensions of the output image;
> + background - A 3 element tuple representing the rgb color expected for the background or a new cairo linear gradient.
> + If left None, a gray to white gradient will be generated;
> + border - Distance in pixels of a square border into which the graphics will be drawn;
> +    grid - Whether or not the grid is to be drawn;
> + rounded_corners - Whether or not the bars should have rounded corners;
> + three_dimension - Whether or not the bars should be drawn in pseudo 3D;
> + x_labels, y_labels - lists of strings containing the horizontal and vertical labels for the axis;
> + x_bounds, y_bounds - tuples containing the lower and upper value bounds for the data to be plotted;
> + colors - List containing the colors expected for each of the bars.
> +
> + - Example of use
> +
> + data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
> +    CairoPlot.horizontal_bar_plot ('bar2', data, 400, 300, border = 20, grid = True, rounded_corners = False)
> + '''
> +
> + plot = HorizontalBarPlot(name, data, width, height, background, border,
> + display_values, grid, rounded_corners, stack, three_dimension,
> + series_labels, x_labels, y_labels, x_bounds, y_bounds, colors)
> + plot.render()
> + plot.commit()
> +
> +def stream_chart(name,
> + data,
> + width,
> + height,
> + background = "white light_gray",
> + border = 0,
> + grid = False,
> + series_legend = None,
> + x_labels = None,
> + x_bounds = None,
> + y_bounds = None,
> + colors = None):
> +
> +    #TODO: Add a docstring for stream_chart (a usage sketch follows this function)
> + plot = StreamChart(name, data, width, height, background, border,
> + grid, series_legend, x_labels, x_bounds, y_bounds, colors)
> + plot.render()
> + plot.commit()
> +
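> +# A hedged usage sketch while the stream_chart docstring is still a TODO:
> +# the data layout below (one sub-list per x position, one value per series
> +# layer) mirrors the indexing done in StreamChart.render_plot above, but the
> +# values, labels and file name are illustrative assumptions only.
> +def _stream_chart_sketch():
> +    data = [[1, 2], [3, 1], [2, 2], [1, 3], [2, 1]]
> +    stream_chart('stream_sketch', data, 400, 300,
> +                 x_labels = ['a', 'b', 'c', 'd', 'e'])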
> +
> +if __name__ == "__main__":
> + import tests
> + import seriestests
> diff --git a/bindings/python/examples/output_format_modules/pprint_table.py b/bindings/python/examples/output_format_modules/pprint_table.py
> new file mode 100644
> index 0000000..1cd8620
> --- /dev/null
> +++ b/bindings/python/examples/output_format_modules/pprint_table.py
> @@ -0,0 +1,35 @@
> +# This module is used to pretty-print a table
> +# Adapted from
> +# http://ginstrom.com/scribbles/2007/09/04/pretty-printing-a-table-in-python/
> +
> +import sys
> +
> +def get_max_width(table, index):
> + """Get the maximum width of the given column index"""
> +
> + return max([len(str(row[index])) for row in table])
> +
> +
> +def pprint_table(table, nbLeft=1, out=sys.stdout):
> + """
> + Prints out a table of data, padded for alignment
> + @param table: The table to print. A list of lists.
> + Each row must have the same number of columns.
> + @param nbLeft: The number of columns aligned left
> + @param out: Output stream (file-like object)
> + """
> +
> + col_paddings = []
> +
> + for i in range(len(table[0])):
> + col_paddings.append(get_max_width(table, i))
> +
> + for row in table:
> + # left cols
> + for i in range(nbLeft):
> + print >> out, str(row[i]).ljust(col_paddings[i] + 1),
> + # rest of the cols
> + for i in range(nbLeft, len(row)):
> + col = str(row[i]).rjust(col_paddings[i] + 2)
> + print >> out, col,
> + print >> out
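> +
> +if __name__ == "__main__":
> +    # Hedged usage sketch, not part of the upstream recipe: print a small
> +    # table of made-up event counts, left-aligning only the first column.
> +    example = [["event", "count"],
> +               ["sched_switch", 1234],
> +               ["sys_write", 56]]
> +    pprint_table(example, nbLeft=1)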
> diff --git a/bindings/python/examples/output_format_modules/series.py b/bindings/python/examples/output_format_modules/series.py
> new file mode 100755
> index 0000000..8e8b236
> --- /dev/null
> +++ b/bindings/python/examples/output_format_modules/series.py
> @@ -0,0 +1,1140 @@
> +#!/usr/bin/env python
> +# -*- coding: utf-8 -*-
> +
> +# Serie.py
> +#
> +# Copyright (c) 2008 Magnun Leno da Silva
> +#
> +# Author: Magnun Leno da Silva <magnun.leno at gmail.com>
> +#
> +# This program is free software; you can redistribute it and/or
> +# modify it under the terms of the GNU Lesser General Public License
> +# as published by the Free Software Foundation; either version 2 of
> +# the License, or (at your option) any later version.
> +#
> +# This program is distributed in the hope that it will be useful,
> +# but WITHOUT ANY WARRANTY; without even the implied warranty of
> +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
> +# GNU General Public License for more details.
> +#
> +# You should have received a copy of the GNU Lesser General Public
> +# License along with this program; if not, write to the Free Software
> +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
> +# USA
> +
> +# Contributor: Rodrigo Moreiro Araujo <alf.rodrigo at gmail.com>
> +
> +#import cairoplot
> +import doctest
> +
> +NUMTYPES = (int, float, long)
> +LISTTYPES = (list, tuple)
> +STRTYPES = (str, unicode)
> +FILLING_TYPES = ['linear', 'solid', 'gradient']
> +DEFAULT_COLOR_FILLING = 'solid'
> +#TODO: Define default color list
> +DEFAULT_COLOR_LIST = None
> +
> +class Data(object):
> + '''
> + Class that models the main data structure.
> + It can hold:
> + - a number type (int, float or long)
> +     - a tuple, which represents a point and can have 2 or 3 items (x,y,z)
> + - if a list is passed it will be converted to a tuple.
> +
> +    obs: in case a list is passed it will be converted to a tuple
> + '''
> + def __init__(self, data=None, name=None, parent=None):
> + '''
> +        Initializes the main attributes of the Data class
> + @name - Name for each point;
> + @content - The real data, can be an int, float, long or tuple, which
> + represents a point (x,y) or (x,y,z);
> +        @parent - A pointer that gives the data access to its parent.
> +
> + Usage:
> + >>> d = Data(name='empty'); print d
> + empty: ()
> + >>> d = Data((1,1),'point a'); print d
> + point a: (1, 1)
> + >>> d = Data((1,2,3),'point b'); print d
> + point b: (1, 2, 3)
> + >>> d = Data([2,3],'point c'); print d
> + point c: (2, 3)
> + >>> d = Data(12, 'simple value'); print d
> + simple value: 12
> + '''
> + # Initial values
> + self.__content = None
> + self.__name = None
> +
> + # Setting passed values
> + self.parent = parent
> + self.name = name
> + self.content = data
> +
> + # Name property
> + @apply
> + def name():
> + doc = '''
> + Name is a read/write property that controls the input of name.
> +        - If passed an invalid value, the name is reset to None
> +
> + Usage:
> + >>> d = Data(13); d.name = 'name_test'; print d
> + name_test: 13
> + >>> d.name = 11; print d
> + 13
> + >>> d.name = 'other_name'; print d
> + other_name: 13
> + >>> d.name = None; print d
> + 13
> + >>> d.name = 'last_name'; print d
> + last_name: 13
> + >>> d.name = ''; print d
> + 13
> + '''
> + def fget(self):
> + '''
> + returns the name as a string
> + '''
> + return self.__name
> +
> + def fset(self, name):
> + '''
> + Sets the name of the Data
> + '''
> + if type(name) in STRTYPES and len(name) > 0:
> + self.__name = name
> + else:
> + self.__name = None
> +
> +
> +
> + return property(**locals())
> +
> + # Content property
> + @apply
> + def content():
> + doc = '''
> + Content is a read/write property that validate the data passed
> + and return it.
> +
> + Usage:
> + >>> d = Data(); d.content = 13; d.content
> + 13
> + >>> d = Data(); d.content = (1,2); d.content
> + (1, 2)
> + >>> d = Data(); d.content = (1,2,3); d.content
> + (1, 2, 3)
> + >>> d = Data(); d.content = [1,2,3]; d.content
> + (1, 2, 3)
> + >>> d = Data(); d.content = [1.5,.2,3.3]; d.content
> + (1.5, 0.20000000000000001, 3.2999999999999998)
> + '''
> + def fget(self):
> + '''
> + Return the content of Data
> + '''
> + return self.__content
> +
> + def fset(self, data):
> + '''
> + Ensures that data is a valid tuple/list or a number (int, float
> + or long)
> + '''
> + # Type: None
> + if data is None:
> + self.__content = None
> + return
> +
> + # Type: Int or Float
> + elif type(data) in NUMTYPES:
> + self.__content = data
> +
> + # Type: List or Tuple
> + elif type(data) in LISTTYPES:
> + # Ensures the correct size
> + if len(data) not in (2, 3):
> + raise TypeError, "Data (as list/tuple) must have 2 or 3 items"
> + return
> +
> +                # Ensures that all items in the list/tuple are numbers
> +                not_num = lambda x : type(x) not in NUMTYPES
> +
> +                if max(map(not_num, data)):
> + # An item in data isn't an int or a float
> + raise TypeError, "All content of data must be a number (int or float)"
> +
> +                # Convert the list to a tuple
> + if type(data) is list:
> + data = tuple(data)
> +
> +                # Store a copy of the data
> + self.__content = data[:]
> +
> + # Unknown type!
> + else:
> + self.__content = None
> + raise TypeError, "Data must be an int, float or a tuple with two or three items"
> + return
> +
> + return property(**locals())
> +
> +
> + def clear(self):
> + '''
> +        Clears all of the Data (content, name and parent)
> + '''
> + self.content = None
> + self.name = None
> + self.parent = None
> +
> + def copy(self):
> + '''
> + Returns a copy of the Data structure
> + '''
> + # The copy
> + new_data = Data()
> + if self.content is not None:
> + # If content is a point
> + if type(self.content) is tuple:
> + new_data.__content = self.content[:]
> +
> + # If content is a number
> + else:
> + new_data.__content = self.content
> +
> + # If it has a name
> + if self.name is not None:
> + new_data.__name = self.name
> +
> + return new_data
> +
> + def __str__(self):
> + '''
> + Return a string representation of the Data structure
> + '''
> + if self.name is None:
> + if self.content is None:
> + return ''
> + return str(self.content)
> + else:
> + if self.content is None:
> + return self.name+": ()"
> + return self.name+": "+str(self.content)
> +
> + def __len__(self):
> + '''
> + Return the length of the Data.
> + - If it's a number return 1;
> +         - If it's a list return its length;
> +         - If it's None return 0.
> + '''
> + if self.content is None:
> + return 0
> + elif type(self.content) in NUMTYPES:
> + return 1
> + return len(self.content)
> +
> +
> +
> +
> +class Group(object):
> + '''
> + Class that models a group of data. Every value (int, float, long, tuple
> + or list) passed is converted to a list of Data.
> + It can receive:
> + - A single number (int, float, long);
> + - A list of numbers;
> + - A tuple of numbers;
> + - An instance of Data;
> + - A list of Data;
> +
> + Obs: If a tuple with 2 or 3 items is passed it is converted to a point.
> + If a tuple with only 1 item is passed it's converted to a number;
> +         If a tuple with more than 3 items is passed it's converted to a
> + list of numbers
> + '''
> + def __init__(self, group=None, name=None, parent=None):
> + '''
> +        Initializes the main attributes of a Group instance.
> + @data_list - a list of data which forms the group;
> + @range - a range that represent the x axis of possible functions;
> + @name - name of the data group;
> +        @parent - the Series parent of this group.
> +
> + Usage:
> + >>> g = Group(13, 'simple number'); print g
> + simple number ['13']
> + >>> g = Group((1,2), 'simple point'); print g
> + simple point ['(1, 2)']
> + >>> g = Group([1,2,3,4], 'list of numbers'); print g
> + list of numbers ['1', '2', '3', '4']
> + >>> g = Group((1,2,3,4),'int in tuple'); print g
> + int in tuple ['1', '2', '3', '4']
> + >>> g = Group([(1,2),(2,3),(3,4)], 'list of points'); print g
> + list of points ['(1, 2)', '(2, 3)', '(3, 4)']
> + >>> g = Group([[1,2,3],[1,2,3]], '2D coordinate lists'); print g
> +        2D coordinate lists ['(1, 1)', '(2, 2)', '(3, 3)']
> + >>> g = Group([[1,2],[1,2],[1,2]], '3D coordinate lists'); print g
> +        3D coordinate lists ['(1, 1, 1)', '(2, 2, 2)']
> + '''
> + # Initial values
> + self.__data_list = []
> + self.__range = []
> + self.__name = None
> +
> +
> + self.parent = parent
> + self.name = name
> + self.data_list = group
> +
> + # Name property
> + @apply
> + def name():
> + doc = '''
> + Name is a read/write property that controls the input of name.
> +        - If passed an invalid value, the name is reset to None
> +
> + Usage:
> + >>> g = Group(13); g.name = 'name_test'; print g
> + name_test ['13']
> + >>> g.name = 11; print g
> + ['13']
> + >>> g.name = 'other_name'; print g
> + other_name ['13']
> + >>> g.name = None; print g
> + ['13']
> + >>> g.name = 'last_name'; print g
> + last_name ['13']
> + >>> g.name = ''; print g
> + ['13']
> + '''
> + def fget(self):
> + '''
> + Returns the name as a string
> + '''
> + return self.__name
> +
> + def fset(self, name):
> + '''
> + Sets the name of the Group
> + '''
> + if type(name) in STRTYPES and len(name) > 0:
> + self.__name = name
> + else:
> + self.__name = None
> +
> + return property(**locals())
> +
> + # data_list property
> + @apply
> + def data_list():
> + doc = '''
> + The data_list is a read/write property that can be a list of
> + numbers, a list of points or a list of 2 or 3 coordinate lists. This
> + property uses mainly the self.add_data method.
> +
> + Usage:
> + >>> g = Group(); g.data_list = 13; print g
> + ['13']
> + >>> g.data_list = (1,2); print g
> + ['(1, 2)']
> + >>> g.data_list = Data((1,2),'point a'); print g
> + ['point a: (1, 2)']
> + >>> g.data_list = [1,2,3]; print g
> + ['1', '2', '3']
> + >>> g.data_list = (1,2,3,4); print g
> + ['1', '2', '3', '4']
> + >>> g.data_list = [(1,2),(2,3),(3,4)]; print g
> + ['(1, 2)', '(2, 3)', '(3, 4)']
> + >>> g.data_list = [[1,2],[1,2]]; print g
> + ['(1, 1)', '(2, 2)']
> + >>> g.data_list = [[1,2],[1,2],[1,2]]; print g
> + ['(1, 1, 1)', '(2, 2, 2)']
> + >>> g.range = (10); g.data_list = lambda x:x**2; print g
> + ['(0.0, 0.0)', '(1.0, 1.0)', '(2.0, 4.0)', '(3.0, 9.0)', '(4.0, 16.0)', '(5.0, 25.0)', '(6.0, 36.0)', '(7.0, 49.0)', '(8.0, 64.0)', '(9.0, 81.0)']
> + '''
> + def fget(self):
> + '''
> + Returns the value of data_list
> + '''
> + return self.__data_list
> +
> + def fset(self, group):
> + '''
> + Ensures that group is valid.
> + '''
> + # None
> + if group is None:
> + self.__data_list = []
> +
> + # Int/float/long or Instance of Data
> + elif type(group) in NUMTYPES or isinstance(group, Data):
> + # Clean data_list
> + self.__data_list = []
> + self.add_data(group)
> +
> + # One point
> + elif type(group) is tuple and len(group) in (2,3):
> + self.__data_list = []
> + self.add_data(group)
> +
> + # list of items
> + elif type(group) in LISTTYPES and type(group[0]) is not list:
> + # Clean data_list
> + self.__data_list = []
> + for item in group:
> + # try to append and catch an exception
> + self.add_data(item)
> +
> + # function lambda
> + elif callable(group):
> + # Explicit is better than implicit
> + function = group
> + # Has range
> + if len(self.range) is not 0:
> + # Clean data_list
> + self.__data_list = []
> + # Generate values for the lambda function
> + for x in self.range:
> + #self.add_data((x,round(group(x),2)))
> + self.add_data((x,function(x)))
> +
> + # Only have range in parent
> + elif self.parent is not None and len(self.parent.range) is not 0:
> + # Copy parent range
> + self.__range = self.parent.range[:]
> + # Clean data_list
> + self.__data_list = []
> + # Generate values for the lambda function
> + for x in self.range:
> + #self.add_data((x,round(group(x),2)))
> + self.add_data((x,function(x)))
> +
> + # Don't have range anywhere
> + else:
> + # x_data don't exist
> + raise Exception, "Data argument is valid but to use function type please set x_range first"
> +
> + # Coordinate Lists
> + elif type(group) in LISTTYPES and type(group[0]) is list:
> + # Clean data_list
> + self.__data_list = []
> + data = []
> + if len(group) == 3:
> + data = zip(group[0], group[1], group[2])
> + elif len(group) == 2:
> + data = zip(group[0], group[1])
> + else:
> + raise TypeError, "Only one list of coordinates was received."
> +
> + for item in data:
> + self.add_data(item)
> +
> + else:
> + raise TypeError, "Group type not supported"
> +
> + return property(**locals())
> +
> + @apply
> + def range():
> + doc = '''
> +        The range is a read/write property that generates a range of values
> +        for the x axis of the functions. When passed a tuple it works almost
> +        like the built-in range function:
> +         - 1 item, represents the end of the range, starting from 0;
> +         - 2 items, represent the start and the end, respectively;
> +         - 3 items, the last one represents the step;
> +
> +        When passed a list, the list itself is used as the range (converted
> +        to floats, with duplicates removed and sorted in ascending order).
> +
> + Usage:
> + >>> g = Group(); g.range = 10; print g.range
> + [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]
> + >>> g = Group(); g.range = (5); print g.range
> + [0.0, 1.0, 2.0, 3.0, 4.0]
> + >>> g = Group(); g.range = (1,7); print g.range
> + [1.0, 2.0, 3.0, 4.0, 5.0, 6.0]
> + >>> g = Group(); g.range = (0,10,2); print g.range
> + [0.0, 2.0, 4.0, 6.0, 8.0]
> + >>>
> + >>> g = Group(); g.range = [0]; print g.range
> + [0.0]
> + >>> g = Group(); g.range = [0,10,20]; print g.range
> + [0.0, 10.0, 20.0]
> + '''
> + def fget(self):
> + '''
> + Returns the range
> + '''
> + return self.__range
> +
> + def fset(self, x_range):
> + '''
> + Controls the input of a valid type and generate the range
> + '''
> + # if passed a simple number convert to tuple
> + if type(x_range) in NUMTYPES:
> + x_range = (x_range,)
> +
> + # A list, just convert to float
> + if type(x_range) is list and len(x_range) > 0:
> + # Convert all to float
> + x_range = map(float, x_range)
> + # Prevents repeated values and convert back to list
> + self.__range = list(set(x_range[:]))
> + # Sort the list to ascending order
> + self.__range.sort()
> +
> + # A tuple, must check the lengths and generate the values
> + elif type(x_range) is tuple and len(x_range) in (1,2,3):
> + # Convert all to float
> + x_range = map(float, x_range)
> +
> +                # Initial values
> + start = 0.0
> + step = 1.0
> + end = 0.0
> +
> +                # Only the end, and it can't be less than or equal to 0
> +                if len(x_range) is 1 and x_range[0] > 0:
> + end = x_range[0]
> +
> +                # The start and the end, but the start must be less than the end
> + elif len(x_range) is 2 and x_range[0] < x_range[1]:
> + start = x_range[0]
> + end = x_range[1]
> +
> +                # All 3, but the start must be less than the end
> + elif x_range[0] <= x_range[1]:
> + start = x_range[0]
> + end = x_range[1]
> + step = x_range[2]
> +
> + # Starts the range
> + self.__range = []
> + # Generate the range
> + # Can't use the range function because it doesn't support float values
> + while start < end:
> + self.__range.append(start)
> + start += step
> +
> + # Incorrect type
> + else:
> + raise Exception, "x_range must be a list with one or more items or a tuple with 2 or 3 items"
> +
> + return property(**locals())
> +
> + def add_data(self, data, name=None):
> + '''
> + Append a new data to the data_list.
> + - If data is an instance of Data, append it
> + - If it's an int, float, tuple or list create an instance of Data and append it
> +
> + Usage:
> + >>> g = Group()
> + >>> g.add_data(12); print g
> + ['12']
> + >>> g.add_data(7,'other'); print g
> + ['12', 'other: 7']
> + >>>
> + >>> g = Group()
> + >>> g.add_data((1,1),'a'); print g
> + ['a: (1, 1)']
> + >>> g.add_data((2,2),'b'); print g
> + ['a: (1, 1)', 'b: (2, 2)']
> + >>>
> + >>> g.add_data(Data((1,2),'c')); print g
> + ['a: (1, 1)', 'b: (2, 2)', 'c: (1, 2)']
> + '''
> + if not isinstance(data, Data):
> + # Try to convert
> + data = Data(data,name,self)
> +
> + if data.content is not None:
> + self.__data_list.append(data.copy())
> + self.__data_list[-1].parent = self
> +
> +
> + def to_list(self):
> + '''
> + Returns the group as a list of numbers (int, float or long) or a
> + list of tuples (points 2D or 3D).
> +
> + Usage:
> + >>> g = Group([1,2,3,4],'g1'); g.to_list()
> + [1, 2, 3, 4]
> + >>> g = Group([(1,2),(2,3),(3,4)],'g2'); g.to_list()
> + [(1, 2), (2, 3), (3, 4)]
> + >>> g = Group([(1,2,3),(3,4,5)],'g2'); g.to_list()
> + [(1, 2, 3), (3, 4, 5)]
> + '''
> + return [data.content for data in self]
> +
> + def copy(self):
> + '''
> + Returns a copy of this group
> + '''
> + new_group = Group()
> + new_group.__name = self.__name
> + if self.__range is not None:
> + new_group.__range = self.__range[:]
> + for data in self:
> + new_group.add_data(data.copy())
> + return new_group
> +
> + def get_names(self):
> + '''
> + Return a list with the names of all data in this group
> + '''
> + names = []
> + for data in self:
> + if data.name is None:
> +                names.append('Data '+str(self.data_list.index(data)+1))
> + else:
> + names.append(data.name)
> + return names
> +
> +
> + def __str__ (self):
> + '''
> + Returns a string representing the Group
> + '''
> + ret = ""
> + if self.name is not None:
> + ret += self.name + " "
> + if len(self) > 0:
> + list_str = [str(item) for item in self]
> + ret += str(list_str)
> + else:
> + ret += "[]"
> + return ret
> +
> + def __getitem__(self, key):
> + '''
> + Makes a Group iterable, based in the data_list property
> + '''
> + return self.data_list[key]
> +
> + def __len__(self):
> + '''
> + Returns the length of the Group, based in the data_list property
> + '''
> + return len(self.data_list)
> +
> +
> +class Colors(object):
> + '''
> +    Class that models colors, their labels (names) and their properties:
> +    RGB values and filling type.
> +
> + It can receive:
> + - A list where each item is a list with 3 or 4 items. The
> + first 3 items represent the RGB values and the last argument
> + defines the filling type. The list will be converted to a dict
> +          and each color will receive a name based on its position in the
> + list.
> + - A dictionary where each key will be the color name and its item
> + can be a list with 3 or 4 items. The first 3 items represent
> + the RGB colors and the last argument defines the filling type.
> + '''
> + def __init__(self, color_list=None):
> + '''
> + Start the color_list property
> +        @color_list - the list or dict containing the color properties.
> + '''
> + self.__color_list = None
> +
> + self.color_list = color_list
> +
> + @apply
> + def color_list():
> + doc = '''
> + >>> c = Colors([[1,1,1],[2,2,2,'linear'],[3,3,3,'gradient']])
> + >>> print c.color_list
> + {'Color 2': [2, 2, 2, 'linear'], 'Color 3': [3, 3, 3, 'gradient'], 'Color 1': [1, 1, 1, 'solid']}
> + >>> c.color_list = [[1,1,1],(2,2,2,'solid'),(3,3,3,'linear')]
> + >>> print c.color_list
> + {'Color 2': [2, 2, 2, 'solid'], 'Color 3': [3, 3, 3, 'linear'], 'Color 1': [1, 1, 1, 'solid']}
> + >>> c.color_list = {'a':[1,1,1],'b':(2,2,2,'solid'),'c':(3,3,3,'linear'), 'd':(4,4,4)}
> + >>> print c.color_list
> + {'a': [1, 1, 1, 'solid'], 'c': [3, 3, 3, 'linear'], 'b': [2, 2, 2, 'solid'], 'd': [4, 4, 4, 'solid']}
> + '''
> + def fget(self):
> + '''
> + Return the color list
> + '''
> + return self.__color_list
> +
> + def fset(self, color_list):
> + '''
> + Format the color list to a dictionary
> + '''
> + if color_list is None:
> + self.__color_list = None
> + return
> +
> + if type(color_list) in LISTTYPES and type(color_list[0]) in LISTTYPES:
> + old_color_list = color_list[:]
> + color_list = {}
> + for index, color in enumerate(old_color_list):
> + if len(color) is 3 and max(map(type, color)) in NUMTYPES:
> + color_list['Color '+str(index+1)] = list(color)+[DEFAULT_COLOR_FILLING]
> + elif len(color) is 4 and max(map(type, color[:-1])) in NUMTYPES and color[-1] in FILLING_TYPES:
> + color_list['Color '+str(index+1)] = list(color)
> + else:
> +                        raise TypeError, "Unsupported color format"
> + elif type(color_list) is not dict:
> +                raise TypeError, "Unsupported color format"
> +
> + for name, color in color_list.items():
> + if len(color) is 3:
> + if max(map(type, color)) in NUMTYPES:
> + color_list[name] = list(color)+[DEFAULT_COLOR_FILLING]
> + else:
> +                        raise TypeError, "Unsupported color format"
> + elif len(color) is 4:
> + if max(map(type, color[:-1])) in NUMTYPES and color[-1] in FILLING_TYPES:
> + color_list[name] = list(color)
> + else:
> +                        raise TypeError, "Unsupported color format"
> + self.__color_list = color_list.copy()
> +
> + return property(**locals())
> +
> +
> +class Series(object):
> + '''
> + Class that models a Series (group of groups). Every value (int, float,
> + long, tuple or list) passed is converted to a list of Group or Data.
> + It can receive:
> + - a single number or point, will be converted to a Group of one Data;
> + - a list of numbers, will be converted to a group of numbers;
> + - a list of tuples, will converted to a single Group of points;
> + - a list of lists of numbers, each 'sublist' will be converted to a
> + group of numbers;
> + - a list of lists of tuples, each 'sublist' will be converted to a
> + group of points;
> + - a list of lists of lists, the content of the 'sublist' will be
> +        processed as coordinate lists and the result will be converted to
> + a group of points;
> +      - a Dictionary where each item can be the same as in the list case: a
> +        number, a point, a list of numbers, a list of points or a list of
> +        lists (coordinate lists);
> + - an instance of Data;
> + - an instance of group.
> + '''
> + def __init__(self, series=None, name=None, property=[], colors=None):
> + '''
> +        Initializes the main attributes of a Series instance.
> +        @series - a list or dict of data of which the series is composed;
> + @name - name of the series;
> + @property - a list/dict of properties to be used in the plots of
> + this Series
> +
> + Usage:
> + >>> print Series([1,2,3,4])
> + ["Group 1 ['1', '2', '3', '4']"]
> + >>> print Series([[1,2,3],[4,5,6]])
> + ["Group 1 ['1', '2', '3']", "Group 2 ['4', '5', '6']"]
> + >>> print Series((1,2))
> + ["Group 1 ['(1, 2)']"]
> + >>> print Series([(1,2),(2,3)])
> + ["Group 1 ['(1, 2)', '(2, 3)']"]
> + >>> print Series([[(1,2),(2,3)],[(4,5),(5,6)]])
> + ["Group 1 ['(1, 2)', '(2, 3)']", "Group 2 ['(4, 5)', '(5, 6)']"]
> + >>> print Series([[[1,2,3],[1,2,3],[1,2,3]]])
> + ["Group 1 ['(1, 1, 1)', '(2, 2, 2)', '(3, 3, 3)']"]
> + >>> print Series({'g1':[1,2,3], 'g2':[4,5,6]})
> + ["g1 ['1', '2', '3']", "g2 ['4', '5', '6']"]
> + >>> print Series({'g1':[(1,2),(2,3)], 'g2':[(4,5),(5,6)]})
> + ["g1 ['(1, 2)', '(2, 3)']", "g2 ['(4, 5)', '(5, 6)']"]
> + >>> print Series({'g1':[[1,2],[1,2]], 'g2':[[4,5],[4,5]]})
> + ["g1 ['(1, 1)', '(2, 2)']", "g2 ['(4, 4)', '(5, 5)']"]
> + >>> print Series(Data(1,'d1'))
> + ["Group 1 ['d1: 1']"]
> + >>> print Series(Group([(1,2),(2,3)],'g1'))
> + ["g1 ['(1, 2)', '(2, 3)']"]
> + '''
> +        # Initial values
> + self.__group_list = []
> + self.__name = None
> + self.__range = None
> +
> + # TODO: Implement colors with filling
> + self.__colors = None
> +
> + self.name = name
> + self.group_list = series
> + self.colors = colors
> +
> + # Name property
> + @apply
> + def name():
> + doc = '''
> + Name is a read/write property that controls the input of name.
> +        - If passed an invalid value, the name is reset to None
> +
> + Usage:
> + >>> s = Series(13); s.name = 'name_test'; print s
> + name_test ["Group 1 ['13']"]
> + >>> s.name = 11; print s
> + ["Group 1 ['13']"]
> + >>> s.name = 'other_name'; print s
> + other_name ["Group 1 ['13']"]
> + >>> s.name = None; print s
> + ["Group 1 ['13']"]
> + >>> s.name = 'last_name'; print s
> + last_name ["Group 1 ['13']"]
> + >>> s.name = ''; print s
> + ["Group 1 ['13']"]
> + '''
> + def fget(self):
> + '''
> + Returns the name as a string
> + '''
> + return self.__name
> +
> + def fset(self, name):
> + '''
> + Sets the name of the Series
> + '''
> + if type(name) in STRTYPES and len(name) > 0:
> + self.__name = name
> + else:
> + self.__name = None
> +
> + return property(**locals())
> +
> +
> +
> + # Colors property
> + @apply
> + def colors():
> + doc = '''
> + >>> s = Series()
> + >>> s.colors = [[1,1,1],[2,2,2,'linear'],[3,3,3,'gradient']]
> + >>> print s.colors
> + {'Color 2': [2, 2, 2, 'linear'], 'Color 3': [3, 3, 3, 'gradient'], 'Color 1': [1, 1, 1, 'solid']}
> + >>> s.colors = [[1,1,1],(2,2,2,'solid'),(3,3,3,'linear')]
> + >>> print s.colors
> + {'Color 2': [2, 2, 2, 'solid'], 'Color 3': [3, 3, 3, 'linear'], 'Color 1': [1, 1, 1, 'solid']}
> + >>> s.colors = {'a':[1,1,1],'b':(2,2,2,'solid'),'c':(3,3,3,'linear'), 'd':(4,4,4)}
> + >>> print s.colors
> + {'a': [1, 1, 1, 'solid'], 'c': [3, 3, 3, 'linear'], 'b': [2, 2, 2, 'solid'], 'd': [4, 4, 4, 'solid']}
> + '''
> + def fget(self):
> + '''
> + Return the color list
> + '''
> + return self.__colors.color_list
> +
> + def fset(self, colors):
> + '''
> + Format the color list to a dictionary
> + '''
> + self.__colors = Colors(colors)
> +
> + return property(**locals())
> +
> + @apply
> + def range():
> + doc = '''
> + The range is a read/write property that generates a range of values
> + for the x axis of the functions. When passed a tuple, it works almost
> + like the built-in range function:
> + - 1 item, represents the end of the range, starting from 0;
> + - 2 items, represents the start and the end, respectively;
> + - 3 items, the last one represents the step;
> +
> + When passed a list, the list is used directly as the range.
> +
> + Usage:
> + >>> s = Series(); s.range = 10; print s.range
> + [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0]
> + >>> s = Series(); s.range = (5); print s.range
> + [0.0, 1.0, 2.0, 3.0, 4.0, 5.0]
> + >>> s = Series(); s.range = (1,7); print s.range
> + [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0]
> + >>> s = Series(); s.range = (0,10,2); print s.range
> + [0.0, 2.0, 4.0, 6.0, 8.0, 10.0]
> + >>>
> + >>> s = Series(); s.range = [0]; print s.range
> + [0.0]
> + >>> s = Series(); s.range = [0,10,20]; print s.range
> + [0.0, 10.0, 20.0]
> + '''
> + def fget(self):
> + '''
> + Returns the range
> + '''
> + return self.__range
> +
> + def fset(self, x_range):
> + '''
> + Validates the input type and generates the range
> + '''
> + # if passed a simple number convert to tuple
> + if type(x_range) in NUMTYPES:
> + x_range = (x_range,)
> +
> + # A list, just convert to float
> + if type(x_range) is list and len(x_range) > 0:
> + # Convert all to float
> + x_range = map(float, x_range)
> + # Remove repeated values and convert back to a list
> + self.__range = list(set(x_range[:]))
> + # Sort the list to ascending order
> + self.__range.sort()
> +
> + # A tuple, must check the lengths and generate the values
> + elif type(x_range) is tuple and len(x_range) in (1,2,3):
> + # Convert all to float
> + x_range = map(float, x_range)
> +
> + # Initial values
> + start = 0.0
> + step = 1.0
> + end = 0.0
> +
> + # Only the end, and it can't be less than or equal to 0
> + if len(x_range) == 1 and x_range[0] > 0:
> + end = x_range[0]
> +
> + # The start and the end, but the start must be less than the end
> + elif len(x_range) == 2 and x_range[0] < x_range[1]:
> + start = x_range[0]
> + end = x_range[1]
> +
> + # All 3, but the start must be less than the end
> + elif x_range[0] < x_range[1]:
> + start = x_range[0]
> + end = x_range[1]
> + step = x_range[2]
> +
> + # Starts the range
> + self.__range = []
> + # Generate the range
> + # Can't use the built-in range function because it doesn't support float values
> + while start <= end:
> + self.__range.append(start)
> + start += step
> +
> + # Incorrect type
> + else:
> + raise Exception, "x_range must be a list with one or more item or a tuple with 2 or 3 items"
> +
> + return property(**locals())
> +
> + @apply
> + def group_list():
> + doc = '''
> + The group_list is a read/write property used to pre-process the list
> + of Groups.
> + It can be:
> + - a single number, point or lambda, will be converted to a single
> + Group of one Data;
> + - a list of numbers, will be converted to a group of numbers;
> + - a list of tuples, will be converted to a single Group of points;
> + - a list of lists of numbers, each 'sublist' will be converted to
> + a group of numbers;
> + - a list of lists of tuples, each 'sublist' will be converted to a
> + group of points;
> + - a list of lists of lists, the content of the 'sublist' will be
> + processed as coordinated lists and the result will be converted
> + to a group of points;
> + - a list of lambdas, each lambda represents a Group;
> + - a Dictionary where each item can be the same of the list: number,
> + point, list of numbers, list of points, list of lists
> + (coordinated lists) or lambdas
> + - an instance of Data;
> + - an instance of Group.
> +
> + Usage:
> + >>> s = Series()
> + >>> s.group_list = [1,2,3,4]; print s
> + ["Group 1 ['1', '2', '3', '4']"]
> + >>> s.group_list = [[1,2,3],[4,5,6]]; print s
> + ["Group 1 ['1', '2', '3']", "Group 2 ['4', '5', '6']"]
> + >>> s.group_list = (1,2); print s
> + ["Group 1 ['(1, 2)']"]
> + >>> s.group_list = [(1,2),(2,3)]; print s
> + ["Group 1 ['(1, 2)', '(2, 3)']"]
> + >>> s.group_list = [[(1,2),(2,3)],[(4,5),(5,6)]]; print s
> + ["Group 1 ['(1, 2)', '(2, 3)']", "Group 2 ['(4, 5)', '(5, 6)']"]
> + >>> s.group_list = [[[1,2,3],[1,2,3],[1,2,3]]]; print s
> + ["Group 1 ['(1, 1, 1)', '(2, 2, 2)', '(3, 3, 3)']"]
> + >>> s.group_list = [(0.5,5.5) , [(0,4),(6,8)] , (5.5,7) , (7,9)]; print s
> + ["Group 1 ['(0.5, 5.5)']", "Group 2 ['(0, 4)', '(6, 8)']", "Group 3 ['(5.5, 7)']", "Group 4 ['(7, 9)']"]
> + >>> s.group_list = {'g1':[1,2,3], 'g2':[4,5,6]}; print s
> + ["g1 ['1', '2', '3']", "g2 ['4', '5', '6']"]
> + >>> s.group_list = {'g1':[(1,2),(2,3)], 'g2':[(4,5),(5,6)]}; print s
> + ["g1 ['(1, 2)', '(2, 3)']", "g2 ['(4, 5)', '(5, 6)']"]
> + >>> s.group_list = {'g1':[[1,2],[1,2]], 'g2':[[4,5],[4,5]]}; print s
> + ["g1 ['(1, 1)', '(2, 2)']", "g2 ['(4, 4)', '(5, 5)']"]
> + >>> s.range = 10
> + >>> s.group_list = lambda x:x*2
> + >>> s.group_list = [lambda x:x*2, lambda x:x**2, lambda x:x**3]; print s
> + ["Group 1 ['(0.0, 0.0)', '(1.0, 2.0)', '(2.0, 4.0)', '(3.0, 6.0)', '(4.0, 8.0)', '(5.0, 10.0)', '(6.0, 12.0)', '(7.0, 14.0)', '(8.0, 16.0)', '(9.0, 18.0)', '(10.0, 20.0)']", "Group 2 ['(0.0, 0.0)', '(1.0, 1.0)', '(2.0, 4.0)', '(3.0, 9.0)', '(4.0, 16.0)', '(5.0, 25.0)', '(6.0, 36.0)', '(7.0, 49.0)', '(8.0, 64.0)', '(9.0, 81.0)', '(10.0, 100.0)']", "Group 3 ['(0.0, 0.0)', '(1.0, 1.0)', '(2.0, 8.0)', '(3.0, 27.0)', '(4.0, 64.0)', '(5.0, 125.0)', '(6.0, 216.0)', '(7.0, 343.0)', '(8.0, 512.0)', '(9.0, 729.0)', '(10.0, 1000.0)']"]
> + >>> s.group_list = {'linear':lambda x:x*2, 'square':lambda x:x**2, 'cubic':lambda x:x**3}; print s
> + ["cubic ['(0.0, 0.0)', '(1.0, 1.0)', '(2.0, 8.0)', '(3.0, 27.0)', '(4.0, 64.0)', '(5.0, 125.0)', '(6.0, 216.0)', '(7.0, 343.0)', '(8.0, 512.0)', '(9.0, 729.0)', '(10.0, 1000.0)']", "linear ['(0.0, 0.0)', '(1.0, 2.0)', '(2.0, 4.0)', '(3.0, 6.0)', '(4.0, 8.0)', '(5.0, 10.0)', '(6.0, 12.0)', '(7.0, 14.0)', '(8.0, 16.0)', '(9.0, 18.0)', '(10.0, 20.0)']", "square ['(0.0, 0.0)', '(1.0, 1.0)', '(2.0, 4.0)', '(3.0, 9.0)', '(4.0, 16.0)', '(5.0, 25.0)', '(6.0, 36.0)', '(7.0, 49.0)', '(8.0, 64.0)', '(9.0, 81.0)', '(10.0, 100.0)']"]
> + >>> s.group_list = Data(1,'d1'); print s
> + ["Group 1 ['d1: 1']"]
> + >>> s.group_list = Group([(1,2),(2,3)],'g1'); print s
> + ["g1 ['(1, 2)', '(2, 3)']"]
> + '''
> + def fget(self):
> + '''
> + Return the group list.
> + '''
> + return self.__group_list
> +
> + def fset(self, series):
> + '''
> + Controls the input of a valid group list.
> + '''
> + #TODO: Add support for the following stream of data: [ (0.5,5.5) , [(0,4),(6,8)] , (5.5,7) , (7,9)]
> +
> + # Type: None
> + if series is None:
> + self.__group_list = []
> +
> + # List or Tuple
> + elif type(series) in LISTTYPES:
> + self.__group_list = []
> +
> + is_function = lambda x: callable(x)
> + # Groups
> + if list in map(type, series) or max(map(is_function, series)):
> + for group in series:
> + self.add_group(group)
> +
> + # single group
> + else:
> + self.add_group(series)
> +
> + #old code
> + ## List of numbers
> + #if type(series[0]) in NUMTYPES or type(series[0]) is tuple:
> + # print series
> + # self.add_group(series)
> + #
> + ## List of anything else
> + #else:
> + # for group in series:
> + # self.add_group(group)
> +
> + # Dict representing series of groups
> + elif type(series) is dict:
> + self.__group_list = []
> + names = series.keys()
> + names.sort()
> + for name in names:
> + self.add_group(Group(series[name],name,self))
> +
> + # A single lambda
> + elif callable(series):
> + self.__group_list = []
> + self.add_group(series)
> +
> + # Int/float, instance of Group or Data
> + elif type(series) in NUMTYPES or isinstance(series, Group) or isinstance(series, Data):
> + self.__group_list = []
> + self.add_group(series)
> +
> + # Default
> + else:
> + raise TypeError, "Serie type not supported"
> +
> + return property(**locals())
> +
> + def add_group(self, group, name=None):
> + '''
> + Append a new group in group_list
> + '''
> + if not isinstance(group, Group):
> + #Try to convert
> + group = Group(group, name, self)
> +
> + if len(group.data_list) != 0:
> + # Auto naming groups
> + if group.name is None:
> + group.name = "Group "+str(len(self.__group_list)+1)
> +
> + self.__group_list.append(group)
> + self.__group_list[-1].parent = self
> +
> + def copy(self):
> + '''
> + Returns a copy of the Series
> + '''
> + new_series = Series()
> + new_series.__name = self.__name
> + if self.__range is not None:
> + new_series.__range = self.__range[:]
> + #Add color property in the copy method
> + #self.__colors = None
> +
> + for group in self:
> + new_series.add_group(group.copy())
> +
> + return new_series
> +
> + def get_names(self):
> + '''
> + Returns a list of the names of all groups in the Series
> + '''
> + names = []
> + for group in self:
> + if group.name is None:
> + names.append('Group '+str(group.index()+1))
> + else:
> + names.append(group.name)
> +
> + return names
> +
> + def to_list(self):
> + '''
> + Returns a list with the content of all groups and data
> + '''
> + big_list = []
> + for group in self:
> + for data in group:
> + if type(data.content) in NUMTYPES:
> + big_list.append(data.content)
> + else:
> + big_list = big_list + list(data.content)
> + return big_list
> +
> + def __getitem__(self, key):
> + '''
> + Makes the Series iterable, based on the group_list property
> + '''
> + return self.__group_list[key]
> +
> + def __str__(self):
> + '''
> + Returns a string that represents the Series
> + '''
> + ret = ""
> + if self.name is not None:
> + ret += self.name + " "
> + if len(self) > 0:
> + list_str = [str(item) for item in self]
> + ret += str(list_str)
> + else:
> + ret += "[]"
> + return ret
> +
> + def __len__(self):
> + '''
> + Returns the length of the Series, based on the group_list property
> + '''
> + return len(self.group_list)
> +
> +
> +if __name__ == '__main__':
> + doctest.testmod()
> diff --git a/bindings/python/examples/sched_switch.py b/bindings/python/examples/sched_switch.py
> new file mode 100644
> index 0000000..f252ab5
> --- /dev/null
> +++ b/bindings/python/examples/sched_switch.py
> @@ -0,0 +1,110 @@
> +# The script takes one optional argument (pid)
> +# The script will read events based on pid and
> +# print the scheduler switches happening with the process.
> +# If no arguments are passed, it displays all the scheduler switches.
> +# This can be used to understand which tasks schedule out the current
> +# process being traced, and when it gets scheduled in again.
> +# The trace needs PID context (lttng add-context -k -t pid)
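> +# One possible way to record a suitable kernel trace with lttng
> +# (a rough sketch; session settings and the traced workload are up to you):
> +#   lttng create
> +#   lttng enable-event -k sched_switch
> +#   lttng add-context -k -t pid
> +#   lttng start
> +#   ... run the workload ...
> +#   lttng stop
> +#   lttng destroy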
> +
> +import sys
> +from babeltrace import *
> +
> +if len(sys.argv) < 2 or len(sys.argv) > 3:
> + raise TypeError("Usage: python sched_switch.py [pid] path/to/trace")
> +elif len(sys.argv) == 3:
> + usePID = True
> +else:
> + usePID = False
> +
> +
> +ctx = Context()
> +ret = ctx.add_trace(sys.argv[-1], "ctf")
> +if ret is None:
> + raise IOError("Error adding trace")
> +
> +# Setting iterator
> +bp = IterPos(SEEK_BEGIN)
> +ctf_it = ctf.Iterator(ctx, bp)
> +
> +# Reading events
> +event = ctf_it.read_event()
> +while event is not None:
> + while True:
> + if event.get_name() == "sched_switch":
> + # Getting scope definition
> + sco = event.get_top_level_scope(ctf.scope.STREAM_EVENT_CONTEXT)
> + if sco is None:
> + print("ERROR: Cannot get definition scope for sched_switch")
> + break # Next event
> +
> + # Getting PID
> + pid_field = event.get_field(sco, "_pid")
> + pid = pid_field.get_int64()
> +
> + if ctf.field_error():
> + print("ERROR: Missing PID info for sched_switch")
> + break # Next event
> +
> + if usePID and (pid != long(sys.argv[1])):
> + break # Next event
> +
> + sco = event.get_top_level_scope(ctf.scope.EVENT_FIELDS)
> +
> + # prev_comm
> + field = event.get_field(sco, "_prev_comm")
> + prev_comm = field.get_char_array()
> + if ctf.field_error():
> + print("ERROR: Missing prev_comm context info")
> +
> + # prev_tid
> + field = event.get_field(sco, "_prev_tid")
> + prev_tid = field.get_int64()
> + if ctf.field_error():
> + print("ERROR: Missing prev_tid context info")
> +
> + # prev_prio
> + field = event.get_field(sco, "_prev_prio")
> + prev_prio = field.get_int64()
> + if ctf.field_error():
> + print("ERROR: Missing prev_prio context info")
> +
> + # prev_state
> + field = event.get_field(sco, "_prev_state")
> + prev_state = field.get_int64()
> + if ctf.field_error():
> + print("ERROR: Missing prev_state context info")
> +
> + # next_comm
> + field = event.get_field(sco, "_next_comm")
> + next_comm = field.get_char_array()
> + if ctf.field_error():
> + print("ERROR: Missing next_comm context info")
> +
> + # next_tid
> + field = event.get_field(sco, "_next_tid")
> + next_tid = field.get_int64()
> + if ctf.field_error():
> + print("ERROR: Missing next_tid context info")
> +
> + # next_prio
> + field = event.get_field(sco, "_next_prio")
> + next_prio = field.get_int64()
> + if ctf.field_error():
> + print("ERROR: Missing next_prio context info")
> +
> + # Output
> + print("sched_switch, pid = {}, TS = {}, prev_comm = {},\n\t"
> + "prev_tid = {}, prev_prio = {}, prev_state = {},\n\t"
> + "next_comm = {}, next_tid = {}, next_prio = {}".format(
> + pid, event.get_timestamp(), prev_comm, prev_tid,
> + prev_prio, prev_state, next_comm, next_tid, next_prio))
> +
> + break # Next event
> +
> + # Next event
> + ret = ctf_it.next()
> + if ret < 0:
> + break
> + event = ctf_it.read_event()
> +
> +del ctf_it
> diff --git a/bindings/python/examples/softirqtimes.py b/bindings/python/examples/softirqtimes.py
> new file mode 100644
> index 0000000..19b2deb
> --- /dev/null
> +++ b/bindings/python/examples/softirqtimes.py
> @@ -0,0 +1,130 @@
> +# The script checks the trace for the amount of time
> +# spent from each softirq_raise to softirq_exit.
> +# It prints out the min, max (with timestamp),
> +# average times, the standard deviation and the total count.
> +# Using the cairoplot module, a .svg graph is also produced,
> +# showing the time taken as a function of the time elapsed since
> +# the beginning of the trace.
> +
> +import sys, math
> +from output_format_modules import cairoplot
> +from babeltrace import *
> +
> +if len(sys.argv) < 2:
> + raise TypeError("Usage: python softirqtimes.py path/to/trace")
> +
> +ctx = Context()
> +ret = ctx.add_trace(sys.argv[1], "ctf")
> +if ret is None:
> + raise IOError("Error adding trace")
> +
> +time_taken = []
> +graph_data = []
> +max_time = (0.0, 0.0) # (val, ts)
> +
> +# tmp template: {(cpu_id, vec):TS raise}
> +tmp = {}
> +largest_val = 0
> +
> +# Setting iterator
> +bp = IterPos(SEEK_BEGIN)
> +ctf_it = ctf.Iterator(ctx, bp)
> +
> +# Reading events
> +event = ctf_it.read_event()
> +start_time = event.get_timestamp()
> +while event is not None:
> +
> + event_name = event.get_name()
> + error = True
> + appendNext = False
> +
> + if event_name == 'softirq_raise' or event_name == 'softirq_exit':
> + # Recover cpu_id and vec values to make a key to tmp
> + error = False
> + scope = event.get_top_level_scope(ctf.scope.STREAM_PACKET_CONTEXT)
> + field = event.get_field(scope, "cpu_id")
> + cpu_id = field.get_uint64()
> + if ctf.field_error():
> + print("ERROR: Missing cpu_id info for {}".format(
> + event.get_name()))
> + error = True
> +
> + scope = event.get_top_level_scope(ctf.scope.EVENT_FIELDS)
> + field = event.get_field(scope, "_vec")
> + vec = field.get_uint64()
> + if ctf.field_error():
> + print("ERROR: Missing vec info for {}".format(
> + event.get_name()))
> + error = True
> + key = (cpu_id, vec)
> +
> + if event_name == 'softirq_raise' and not error:
> + # Add timestamp to tmp
> + if key in tmp:
> + # If key already exists
> + i = 0
> + while True:
> + # Add index
> + key = (cpu_id, vec, i)
> + if key in tmp:
> + i += 1
> + continue
> + if i > largest_val:
> + largest_val = i
> + break
> +
> + tmp[key] = event.get_timestamp()
> +
> + if event_name == 'softirq_exit' and not error:
> + # Saving data for output
> + # Key check
> + if key not in tmp:
> + i = 0
> + while i <= largest_val:
> + key = (key[0], key[1], i)
> + if key in tmp:
> + break
> + i += 1
> +
> + raise_timestamp = tmp[key]
> + time_data = event.get_timestamp() - tmp.pop(key)
> + if time_data > max_time[0]:
> + # max_time = (val, ts)
> + max_time = (time_data, raise_timestamp)
> + time_taken.append(time_data)
> + graph_data.append((raise_timestamp - start_time, time_data))
> +
> + # Next Event
> + ret = ctf_it.next()
> + if ret < 0:
> + break
> + event = ctf_it.read_event()
> +
> +
> +del ctf_it
> +
> +# Standard dev. calc.
> +try:
> + mean = sum(time_taken)/float(len(time_taken))
> +except ZeroDivisionError:
> + raise TypeError("empty data")
> +deviations_squared = []
> +for x in time_taken:
> + deviations_squared.append(math.pow((x - mean), 2))
> +try:
> + stddev = math.sqrt(sum(deviations_squared) / (len(deviations_squared) - 1))
> +except ZeroDivisionError:
> + stddev = '-'
> +
> +# Terminal output
> +print("AVG TIME: {} ns".format(mean))
> +print("MIN TIME: {} ns".format(min(time_taken)))
> +print("MAX TIME: {} ns, TS: {}".format(max_time[0], max_time[1]))
> +print("STD DEV: {}".format(stddev))
> +print("TOTAL COUNT: {}".format(len(time_taken)))
> +
> +# Graph output
> +cairoplot.scatter_plot ( 'softirqtimes.svg', data = graph_data,
> + width = 5000, height = 4000, border = 20, axis = True,
> + grid = True, series_colors = ["red"] )
> diff --git a/bindings/python/examples/syscalls_by_pid.py b/bindings/python/examples/syscalls_by_pid.py
> new file mode 100644
> index 0000000..f6127ed
> --- /dev/null
> +++ b/bindings/python/examples/syscalls_by_pid.py
> @@ -0,0 +1,61 @@
> +# The script checks all syscalls in the trace and prints a list
> +# showing the number of system calls executed by each PID,
> +# ordered from greatest to least number of syscalls.
> +# The trace needs PID context (lttng add-context -k -t pid)
> +
> +import sys
> +from babeltrace import *
> +from output_format_modules.pprint_table import pprint_table as pprint
> +
> +if len(sys.argv) < 2 :
> + raise TypeError("Usage: python syscalls_by_pid.py path/to/trace")
> +
> +ctx = Context()
> +ret = ctx.add_trace(sys.argv[1], "ctf")
> +if ret is None:
> + raise IOError("Error adding trace")
> +
> +data = {}
> +
> +# Setting iterator
> +bp = IterPos(SEEK_BEGIN)
> +ctf_it = ctf.Iterator(ctx, bp)
> +
> +# Reading events
> +event = ctf_it.read_event()
> +while event is not None:
> + if event.get_name().find("sys") >= 0:
> + # Getting scope definition
> + sco = event.get_top_level_scope(ctf.scope.STREAM_EVENT_CONTEXT)
> + if sco is None:
> + print("ERROR: Cannot get definition scope for {}".format(
> + event.get_name()))
> + else:
> + # Getting PID
> + pid_field = event.get_field(sco, "_pid")
> + pid = pid_field.get_int64()
> +
> + if ctf.field_error():
> + print("ERROR: Missing PID info for sched_switch".format(
> + event.get_name()))
> + elif pid in data:
> + data[pid] += 1
> + else:
> + data[pid] = 1
> + # Next event
> + ret = ctf_it.next()
> + if ret < 0:
> + break
> + event = ctf_it.read_event()
> +
> +del ctf_it
> +
> +# Setting table for output
> +table = []
> +for item in data:
> + table.append([data[item], item]) # [count, pid]
> +table.sort(reverse = True) # [big count first, pid]
> +for i in range(len(table)):
> + table[i].reverse() # [pid, big count first]
> +table.insert(0, ["PID", "SYSCALL COUNT"])
> +pprint(table)
> diff --git a/bindings/python/python-complements.c b/bindings/python/python-complements.c
> new file mode 100644
> index 0000000..8c4d811
> --- /dev/null
> +++ b/bindings/python/python-complements.c
> @@ -0,0 +1,105 @@
> +/* python-complements.c
> + Needed functions for python binding
> +*/
> +
> +#include "python-complements.h"
> +
> +/* FILE functions
> + ----------------------------------------------------
> +*/
> +
> +FILE *_bt_file_open(char *file_path, char *mode)
> +{
> + FILE *fp = stdout;
> + if (file_path != NULL)
> + fp = fopen(file_path, mode);
> + return fp;
> +}
> +
> +void _bt_file_close(FILE *fp)
> +{
> + if (fp != NULL)
> + fclose(fp);
> +}
> +
> +
> +/* List-related functions
> + ----------------------------------------------------
> +*/
> +
> +/* ctf-field-list */
> +struct definition **_bt_python_field_listcaller(
> + const struct bt_ctf_event *ctf_event,
> + const struct definition *scope)
> +{
> + struct definition **list;
> + unsigned int count;
> + int ret;
> +
> + ret = bt_ctf_get_field_list(ctf_event, scope,
> + (const struct definition * const **)&list, &count);
> +
> + if (ret < 0) /* For python to know an error occurred */
> + list = NULL;
> + else /* For python to know the end is reached */
> + list[count] = NULL;
> +
> + return list;
> +}
> +
> +struct definition *_bt_python_field_one_from_list(
> + struct definition **list, int index)
> +{
> + return list[index];
> +}
> +
> +/* event_decl_list */
> +struct bt_ctf_event_decl **_bt_python_event_decl_listcaller(
> + int handle_id, struct bt_context *ctx)
> +{
> + struct bt_ctf_event_decl **list;
> + unsigned int count;
> + int ret;
> +
> + ret = bt_ctf_get_event_decl_list(handle_id, ctx,
> + (struct bt_ctf_event_decl * const **)&list, &count);
> +
> + if (ret < 0) /* For python to know an error occurred */
> + list = NULL;
> + else /* For python to know the end is reached */
> + list[count] = NULL;
> +
> + return list;
> +}
> +
> +struct bt_ctf_event_decl *_bt_python_decl_one_from_list(
> + struct bt_ctf_event_decl **list, int index)
> +{
> + return list[index];
> +}
> +
> +/* decl_fields */
> +struct bt_ctf_field_decl **_by_python_field_decl_listcaller(
> + struct bt_ctf_event_decl *event_decl,
> + enum bt_ctf_scope scope)
> +{
> + struct bt_ctf_field_decl **list;
> + unsigned int count;
> + int ret;
> +
> + ret = bt_ctf_get_decl_fields(event_decl, scope,
> + (const struct bt_ctf_field_decl * const **)&list, &count);
> +
> + if (ret < 0) /* For python to know an error occurred */
> + list = NULL;
> + else /* For python to know the end is reached */
> + list[count] = NULL;
> +
> + return list;
> +}
> +
> +struct bt_ctf_field_decl *_bt_python_field_decl_one_from_list(
> + struct bt_ctf_field_decl **list, int index)
> +{
> + return list[index];
> +}
> diff --git a/bindings/python/python-complements.h b/bindings/python/python-complements.h
> new file mode 100644
> index 0000000..cdd5528
> --- /dev/null
> +++ b/bindings/python/python-complements.h
> @@ -0,0 +1,36 @@
> +/* python-complements.h
> + Needed functions for python binding
> +*/
> +
> +#include <stdio.h>
> +#include <glib.h>
> +#include <babeltrace/babeltrace.h>
> +#include <babeltrace/format.h>
> +#include <babeltrace/ctf-ir/metadata.h>
> +#include <babeltrace/ctf/events.h>
> +#include <babeltrace/iterator-internal.h>
> +#include <babeltrace/ctf/events-internal.h>
> +
> +/* File */
> +FILE *_bt_file_open(char *file_path, char *mode);
> +void _bt_file_close(FILE *fp);
> +
> +/* ctf-field-list */
> +struct definition **_bt_python_field_listcaller(
> + const struct bt_ctf_event *ctf_event,
> + const struct definition *scope);
> +struct definition *_bt_python_field_one_from_list(
> + struct definition **list, int index);
> +
> +/* event_decl_list */
> +struct bt_ctf_event_decl **_bt_python_event_decl_listcaller(
> + int handle_id, struct bt_context *ctx);
> +struct bt_ctf_event_decl *_bt_python_decl_one_from_list(
> + struct bt_ctf_event_decl **list, int index);
> +
> +/* decl_fields */
> +struct bt_ctf_field_decl **_by_python_field_decl_listcaller(
> + struct bt_ctf_event_decl *event_decl,
> + enum bt_ctf_scope scope);
> +struct bt_ctf_field_decl *_bt_python_field_decl_one_from_list(
> + struct bt_ctf_field_decl **list, int index);
> diff --git a/bootstrap b/bootstrap
> index c507425..f6926ca 100755
> --- a/bootstrap
> +++ b/bootstrap
> @@ -4,7 +4,7 @@ set -x
> if [ ! -e config ]; then
> mkdir config
> fi
> -aclocal
> +aclocal -I m4
> libtoolize --force --copy
> autoheader
> automake --add-missing --copy
> diff --git a/configure.ac b/configure.ac
> index d90479d..f9cff9d 100644
> --- a/configure.ac
> +++ b/configure.ac
> @@ -74,6 +74,41 @@ AC_CHECK_LIB([popt], [poptGetContext], [],
> [AC_MSG_ERROR([Cannot find popt.])]
> )
>
> +
> +# For Python
> +# SWIG version needed or newer:
> +swig_version=2.0.0
> +
> +AC_ARG_ENABLE([python],
> + [AC_HELP_STRING([--disable-python],
> + [do not compile Python bindings])],
> + [], [enable_python=yes])
> +
> +AM_CONDITIONAL([USE_PYTHON], [test "x${enable_python:-yes}" = xyes])
> +
> +if test "x${enable_python:-yes}" = xyes; then
> + AC_MSG_NOTICE([You may configure with --disable-python ]dnl
> +[if you do not want Python bindings.])
> +
> + AX_PKG_SWIG($swig_version, [], [ AC_MSG_ERROR([SWIG $swig_version or newer is needed]) ])
> + AM_PATH_PYTHON
> +
> + AC_ARG_VAR([PYTHON_INCLUDE], [Include flags for python, bypassing python-config])
> + AC_ARG_VAR([PYTHON_CONFIG], [Path to python-config])
> + AS_IF([test -z "$PYTHON_INCLUDE"], [
> + AS_IF([test -z "$PYTHON_CONFIG"], [
> + AC_PATH_PROGS([PYTHON_CONFIG],
> + [python$PYTHON_VERSION-config python-config],
> + [no],
> + [`dirname $PYTHON`])
> + AS_IF([test "$PYTHON_CONFIG" = no], [AC_MSG_ERROR([cannot find python-config for $PYTHON.])])
> + ])
> + AC_MSG_CHECKING([python include flags])
> + PYTHON_INCLUDE=`$PYTHON_CONFIG --includes`
> + AC_MSG_RESULT([$PYTHON_INCLUDE])
> + ])
> +fi
> +
> pkg_modules="gmodule-2.0 >= 2.0.0"
> PKG_CHECK_MODULES(GMODULE, [$pkg_modules])
> AC_SUBST(PACKAGE_LIBS)
> @@ -103,6 +138,8 @@ AC_CONFIG_FILES([
> lib/Makefile
> lib/prio_heap/Makefile
> include/Makefile
> + bindings/Makefile
> + bindings/python/Makefile
> tests/Makefile
> ])
> AC_OUTPUT
> diff --git a/doc/python-howto.txt b/doc/python-howto.txt
> new file mode 100644
> index 0000000..e2ed751
> --- /dev/null
> +++ b/doc/python-howto.txt
> @@ -0,0 +1,70 @@
> +PYTHON BINDINGS
> +----------------
> +
> +This is a brief howto for using the Babeltrace Python module.
> +
> +
> +INSTALLATION:
> +
> +By default, the Python bindings are installed.
> +If you do not want the Python bindings, you can pass the
> +--disable-python option at configure time:
> +
> + $ ./configure --disable-python
> +
> +The Python module is automatically generated using SWIG; therefore, the
> +swig2.0 package is required on Debian/Ubuntu.
> +
> +
> +USAGE:
> +
> +Once installed, the Python module can be used by importing it in Python.
> +In the Python interpreter:
> +
> + >>> import babeltrace
> +
> +Then the starting point is to create a context and add a trace to it.
> +
> + >>> ctx = babeltrace.Context()
> + >>> ctx.add_trace("path/to/trace", <format>)
> +
> +Where <format> is a string containing the name of the format in which
> +the trace was produced. To print the list of available formats to the
> +standard output, use the print_format_list function.
> +
> + >>> out = babeltrace.File(None) # This returns stdout
> + >>> babeltrace.print_format_list(out)
> +
> +When a trace is added to a context, it is opened and ready to be read
> +using an iterator. When creating an iterator, optional starting and
> +ending positions may be specified. So far, only CTF iterators are
> +supported.
> +
> + >>> begin_pos = babeltrace.IterPos(babeltrace.SEEK_BEGIN)
> + >>> iterator = babeltrace.ctf.Iterator(ctx, begin_pos)
> +
> +From there, it is possible to read the events.
> +
> + >>> event = iterator.read_event()
> +
> +It is simple to obtain the timestamp of that event.
> +
> + >>> timestamp = event.get_timestamp()
> +
> +Let's say that we want to extract the prev_comm field of a
> +sched_switch event. To do so, we first need to get the event scope
> +from which the wanted field can be obtained.
> +
> + >>> if event.get_name == "sched_switch":
> + ... #prev_comm only for sched_switch events
> + ... scope = event.get_top_level_scope(babeltrace.ctf.scope.EVENT_FIELDS)
> + ... field = event.get_field(scope, "_prev_comm")
> + ... prev_comm = field.get_char_array()
> +
> +It is also possible to move on to the next event.
> +
> + >>> ret = iterator.next() # Move the iterator
> + >>> if ret == 0: # No error occurred
> + ... event = iterator.read_event() # Read the next event
> +
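> +Putting these pieces together, a minimal sketch of a script that counts
> +the sched_switch events of a trace could look like this (the trace path
> +is a placeholder):
> +
> + import babeltrace
> +
> + ctx = babeltrace.Context()
> + ret = ctx.add_trace("path/to/trace", "ctf")
> + if ret is None:
> +     raise IOError("Error adding trace")
> +
> + # Iterate on the trace from its beginning
> + begin_pos = babeltrace.IterPos(babeltrace.SEEK_BEGIN)
> + iterator = babeltrace.ctf.Iterator(ctx, begin_pos)
> +
> + count = 0
> + event = iterator.read_event()
> + while event is not None:
> +     if event.get_name() == "sched_switch":
> +         count += 1
> +     if iterator.next() < 0:  # Stop on error or at the end of the trace
> +         break
> +     event = iterator.read_event()
> +
> + print("sched_switch count: {}".format(count))
> +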
> +For many usage script examples of the Babeltrace Python module, see the
> +bindings/python/examples directory.
> diff --git a/m4/ax_pkg_swig.m4 b/m4/ax_pkg_swig.m4
> new file mode 100644
> index 0000000..e112f3d
> --- /dev/null
> +++ b/m4/ax_pkg_swig.m4
> @@ -0,0 +1,135 @@
> +# ===========================================================================
> +# http://www.gnu.org/software/autoconf-archive/ax_pkg_swig.html
> +# ===========================================================================
> +#
> +# SYNOPSIS
> +#
> +# AX_PKG_SWIG([major.minor.micro], [action-if-found], [action-if-not-found])
> +#
> +# DESCRIPTION
> +#
> +# This macro searches for a SWIG installation on your system. If found,
> +# then SWIG is AC_SUBST'd; if not found, then $SWIG is empty. If SWIG is
> +# found, then SWIG_LIB is set to the SWIG library path, and AC_SUBST'd.
> +#
> +# You can use the optional first argument to check if the version of the
> +# available SWIG is greater than or equal to the value of the argument. It
> +# should have the format: N[.N[.N]] (N is a number between 0 and 999. Only
> +# the first N is mandatory.) If the version argument is given (e.g.
> +# 1.3.17), AX_PKG_SWIG checks that the swig package is this version number
> +# or higher.
> +#
> +# As usual, action-if-found is executed if SWIG is found, otherwise
> +# action-if-not-found is executed.
> +#
> +# In configure.in, use as:
> +#
> +# AX_PKG_SWIG(1.3.17, [], [ AC_MSG_ERROR([SWIG is required to build..]) ])
> +# AX_SWIG_ENABLE_CXX
> +# AX_SWIG_MULTI_MODULE_SUPPORT
> +# AX_SWIG_PYTHON
> +#
> +# LICENSE
> +#
> +# Copyright (c) 2008 Sebastian Huber <sebastian-huber at web.de>
> +# Copyright (c) 2008 Alan W. Irwin <irwin at beluga.phys.uvic.ca>
> +# Copyright (c) 2008 Rafael Laboissiere <rafael at laboissiere.net>
> +# Copyright (c) 2008 Andrew Collier <colliera at ukzn.ac.za>
> +# Copyright (c) 2011 Murray Cumming <murrayc at openismus.com>
> +#
> +# This program is free software; you can redistribute it and/or modify it
> +# under the terms of the GNU General Public License as published by the
> +# Free Software Foundation; either version 2 of the License, or (at your
> +# option) any later version.
> +#
> +# This program is distributed in the hope that it will be useful, but
> +# WITHOUT ANY WARRANTY; without even the implied warranty of
> +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
> +# Public License for more details.
> +#
> +# You should have received a copy of the GNU General Public License along
> +# with this program. If not, see <http://www.gnu.org/licenses/>.
> +#
> +# As a special exception, the respective Autoconf Macro's copyright owner
> +# gives unlimited permission to copy, distribute and modify the configure
> +# scripts that are the output of Autoconf when processing the Macro. You
> +# need not follow the terms of the GNU General Public License when using
> +# or distributing such scripts, even though portions of the text of the
> +# Macro appear in them. The GNU General Public License (GPL) does govern
> +# all other use of the material that constitutes the Autoconf Macro.
> +#
> +# This special exception to the GPL applies to versions of the Autoconf
> +# Macro released by the Autoconf Archive. When you make and distribute a
> +# modified version of the Autoconf Macro, you may extend this special
> +# exception to the GPL to apply to your modified version as well.
> +
> +#serial 8
> +
> +AC_DEFUN([AX_PKG_SWIG],[
> + # Ubuntu has swig 2.0 as /usr/bin/swig2.0
> + AC_PATH_PROGS([SWIG],[swig swig2.0])
> + if test -z "$SWIG" ; then
> + m4_ifval([$3],[$3],[:])
> + elif test -n "$1" ; then
> + AC_MSG_CHECKING([SWIG version])
> + [swig_version=`$SWIG -version 2>&1 | grep 'SWIG Version' | sed 's/.*\([0-9][0-9]*\.[0-9][0-9]*\.[0-9][0-9]*\).*/\1/g'`]
> + AC_MSG_RESULT([$swig_version])
> + if test -n "$swig_version" ; then
> + # Calculate the required version number components
> + [required=$1]
> + [required_major=`echo $required | sed 's/[^0-9].*//'`]
> + if test -z "$required_major" ; then
> + [required_major=0]
> + fi
> + [required=`echo $required | sed 's/[0-9]*[^0-9]//'`]
> + [required_minor=`echo $required | sed 's/[^0-9].*//'`]
> + if test -z "$required_minor" ; then
> + [required_minor=0]
> + fi
> + [required=`echo $required | sed 's/[0-9]*[^0-9]//'`]
> + [required_patch=`echo $required | sed 's/[^0-9].*//'`]
> + if test -z "$required_patch" ; then
> + [required_patch=0]
> + fi
> + # Calculate the available version number components
> + [available=$swig_version]
> + [available_major=`echo $available | sed 's/[^0-9].*//'`]
> + if test -z "$available_major" ; then
> + [available_major=0]
> + fi
> + [available=`echo $available | sed 's/[0-9]*[^0-9]//'`]
> + [available_minor=`echo $available | sed 's/[^0-9].*//'`]
> + if test -z "$available_minor" ; then
> + [available_minor=0]
> + fi
> + [available=`echo $available | sed 's/[0-9]*[^0-9]//'`]
> + [available_patch=`echo $available | sed 's/[^0-9].*//'`]
> + if test -z "$available_patch" ; then
> + [available_patch=0]
> + fi
> + # Convert the version tuple into a single number for easier comparison.
> + # Using base 100 should be safe since SWIG internally uses BCD values
> + # to encode its version number.
> + required_swig_vernum=`expr $required_major \* 10000 \
> + \+ $required_minor \* 100 \+ $required_patch`
> + available_swig_vernum=`expr $available_major \* 10000 \
> + \+ $available_minor \* 100 \+ $available_patch`
> +
> + if test $available_swig_vernum -lt $required_swig_vernum; then
> + AC_MSG_WARN([SWIG version >= $1 is required. You have $swig_version.])
> + SWIG=''
> + m4_ifval([$3],[$3],[])
> + else
> + AC_MSG_CHECKING([for SWIG library])
> + SWIG_LIB=`$SWIG -swiglib`
> + AC_MSG_RESULT([$SWIG_LIB])
> + m4_ifval([$2],[$2],[])
> + fi
> + else
> + AC_MSG_WARN([cannot determine SWIG version])
> + SWIG=''
> + m4_ifval([$3],[$3],[])
> + fi
> + fi
> + AC_SUBST([SWIG_LIB])
> +])
> diff --git a/tests/tests-python.py b/tests/tests-python.py
> new file mode 100644
> index 0000000..0bd71c2
> --- /dev/null
> +++ b/tests/tests-python.py
> @@ -0,0 +1,115 @@
> +import unittest
> +import sys
> +from babeltrace import *
> +
> +class TestBabeltracePythonModule(unittest.TestCase):
> +
> + def test_handle_decl(self):
> + #Context creation, adding trace
> + ctx = Context()
> + trace_handle = ctx.add_trace(
> + "ctf-traces/succeed/lttng-modules-2.0-pre5",
> + "ctf")
> + self.assertIsNotNone(trace_handle, "Error adding trace")
> +
> + #TraceHandle test
> + ts_begin = trace_handle.get_timestamp_begin(ctx, CLOCK_REAL)
> + ts_end = trace_handle.get_timestamp_end(ctx, CLOCK_REAL)
> + self.assertGreater(ts_end, ts_begin, "Error get_timestamp from trace_handle")
> +
> + lst = ctf.get_event_decl_list(trace_handle, ctx)
> + self.assertIsNotNone(lst, "Error get_event_decl_list")
> +
> + name = lst[0].get_name()
> + self.assertIsNotNone(name)
> +
> + fields = lst[0].get_decl_fields(ctf.scope.EVENT_FIELDS)
> + self.assertIsNotNone(fields, "Error getting FieldDecl list")
> +
> + if len(fields) > 0:
> + self.assertIsNotNone(fields[0].get_name(),
> + "Error getting name from FieldDecl")
> +
> + #Remove trace
> + ctx.remove_trace(trace_handle)
> + del ctx
> + del trace_handle
> +
> +
> + def test_iterator_event(self):
> + #Context creation, adding trace
> + ctx = Context()
> + trace_handle = ctx.add_trace(
> + "ctf-traces/succeed/lttng-modules-2.0-pre5",
> + "ctf")
> + self.assertIsNotNone(trace_handle, "Error adding trace")
> +
> + begin_pos = IterPos(SEEK_BEGIN)
> + it = ctf.Iterator(ctx, begin_pos)
> + self.assertIsNotNone(it, "Error creating iterator")
> +
> + event = it.read_event()
> + self.assertIsNotNone(event, "Error reading event")
> +
> + handle = event.get_handle()
> + self.assertIsNotNone(handle, "Error getting handle")
> +
> + context = event.get_context()
> + self.assertIsNotNone(context, "Error getting context")
> +
> + name = ""
> + while event is not None and name != "sched_switch":
> + name = event.get_name()
> + self.assertIsNotNone(name, "Error getting event name")
> +
> + ts = event.get_timestamp()
> + self.assertGreaterEqual(ts, 0, "Error getting timestamp")
> +
> + cycles = event.get_cycles()
> + self.assertGreaterEqual(cycles, 0, "Error getting cycles")
> +
> + if name == "sched_switch":
> + scope = event.get_top_level_scope(ctf.scope.STREAM_PACKET_CONTEXT)
> + self.assertIsNotNone(scope, "Error getting scope definition")
> +
> + field = event.get_field(scope, "cpu_id")
> + cpu_id = field.get_uint64()
> + self.assertEqual(ctf.field_error(), 0, "Error getting cpu_id info")
> +
> + field_lst = event.get_field_list(scope)
> + self.assertIsNotNone(field_lst, "Error getting field list")
> +
> + fname = field.field_name()
> + self.assertIsNotNone(fname, "Error getting field name")
> +
> + ftype = field.field_type()
> + self.assertNotEqual(ftype, -1, "Error getting field type (unknown)")
> +
> + ret = it.next()
> + self.assertGreaterEqual(ret, 0, "Error moving iterator")
> +
> + event = it.read_event()
> +
> + pos = it.get_pos()
> + self.assertIsNotNone(pos._pos, "Error getting iterator position")
> +
> + ret = it.set_pos(pos)
> + self.assertEqual(ret, 0, "Error setting iterator position")
> +
> + pos = it.create_time_pos(ts)
> + self.assertIsNotNone(pos._pos, "Error creating time-based iterator position")
> +
> + del it, pos
> + set_pos = IterPos(SEEK_TIME, ts)
> + it = ctf.Iterator(ctx, begin_pos, set_pos)
> + self.assertIsNotNone(it, "Error creating iterator with end_pos")
> +
> +
> + def test_file_class(self):
> + f = File("test-bitfield.c")
> + self.assertIsNotNone(f, "Error opening file")
> + f.close()
> +
> +
> +if __name__ == "__main__":
> + unittest.main()
> --
> 1.7.9.5
>
>
> _______________________________________________
> lttng-dev mailing list
> lttng-dev at lists.lttng.org
> http://lists.lttng.org/cgi-bin/mailman/listinfo/lttng-dev
--
Mathieu Desnoyers
Operating System Efficiency R&D Consultant
EfficiOS Inc.
http://www.efficios.com