Commit ffa1e462 authored by g0dil

senfscons: Reimplemented Doxyfile parser

Fix TAGFILE dependencies
Fix some URL references
PPI: Add additional 'route' documentation
Globally disable SUBGROUPING in Doxyfile.global
parent 9232ed30
 @INCLUDE = "$(TOPDIR)/doclib/Doxyfile.global"
 PROJECT_NAME = Overview
 PROJECT_NUMBER = "(Version $(REVISION))"
-HTML_HEADER = doclib/doxy-header-overview.html
-GENERATE_XML = NO
-GENERATE_TAGFILE = doc/overview.tag
\ No newline at end of file
+HTML_HEADER = doclib/doxy-header-overview.html
+GENERATE_XML = NO
+GENERATE_TAGFILE = doc/overview.tag
+TAGFILES = \
+    "$(TOPDIR)/PPI/doc/PPI.tag" \
+    "$(TOPDIR)/Scheduler/doc/Scheduler.tag" \
+    "$(TOPDIR)/Packets/doc/Packets.tag" \
+    "$(TOPDIR)/Socket/doc/Socket.tag" \
+    "$(TOPDIR)/Utils/doc/Utils.tag"
 @INCLUDE = "$(TOPDIR)/doclib/Doxyfile.global"
 PROJECT_NAME = Examples

 @INCLUDE = "$(TOPDIR)/doclib/Doxyfile.global"
 PROJECT_NAME = RateStuffer
-TAGFILES = "$(TOPDIR)/Utils/doc/Utils.tag" "$(TOPDIR)/Socket/doc/Socket.tag" "$(TOPDIR)/Packets/doc/Packets.tag" "$(TOPDIR)/Scheduler/doc/Scheduler.tag" "$(TOPDIR)/PPI/doc/PPI.tag"
 ALPHABETICAL_INDEX = NO
-EXAMPLE_PATH = "."
\ No newline at end of file
+EXAMPLE_PATH = .
+TAGFILES = \
+    "$(TOPDIR)/PPI/doc/PPI.tag" \
+    "$(TOPDIR)/Scheduler/doc/Scheduler.tag" \
+    "$(TOPDIR)/Packets/doc/Packets.tag" \
+    "$(TOPDIR)/Socket/doc/Socket.tag" \
+    "$(TOPDIR)/Utils/doc/Utils.tag"

 @INCLUDE = "$(TOPDIR)/doclib/Doxyfile.global"
 PROJECT_NAME = Sniffer
-TAGFILES = "$(TOPDIR)/Utils/doc/Utils.tag" "$(TOPDIR)/Socket/doc/Socket.tag" "$(TOPDIR)/Packets/doc/Packets.tag" "$(TOPDIR)/Scheduler/doc/Scheduler.tag"
-EXAMPLE_PATH = "."
\ No newline at end of file
+EXAMPLE_PATH = .
+TAGFILES = \
+    "$(TOPDIR)/Scheduler/doc/Scheduler.tag" \
+    "$(TOPDIR)/Packets/doc/Packets.tag" \
+    "$(TOPDIR)/Socket/doc/Socket.tag" \
+    "$(TOPDIR)/Utils/doc/Utils.tag"
@@ -177,9 +177,9 @@
     \see \ref components \n
         \ref build \n
-        <a href="../../../Socket/doc/html/index.html"><b>libSocket API reference</b></a> \n
-        <a href="../../../Packets/doc/html/index.html"><b>libPackets API reference</b></a> \n
-        <a href="../../../Utils/doc/html/index.html"><b>libUtils API reference</b></a>
+        <a href="../../../../Socket/doc/html/index.html"><b>libSocket API reference</b></a> \n
+        <a href="../../../../Packets/doc/html/index.html"><b>libPackets API reference</b></a> \n
+        <a href="../../../../Utils/doc/html/index.html"><b>libUtils API reference</b></a>
 */
...
 @INCLUDE = "$(TOPDIR)/doclib/Doxyfile.global"
 PROJECT_NAME = libPPI
-GENERATE_TAGFILE = doc/ppi.tag
+GENERATE_TAGFILE = doc/PPI.tag
 RECURSIVE = Yes
 SHOW_DIRECTORIES = Yes
-TAGFILES = "$(TOPDIR)/Packets/doc/Packets.tag" "$(TOPDIR)/Socket/doc/Socket.tag" "$(TOPDIR)/Utils/doc/Utils.tag" "$(TOPDIR)/Scheduler/doc/Scheduler.tag"
-GENERATE_TAGFILE = "doc/PPI.tag"
+TAGFILES = \
+    "$(TOPDIR)/Scheduler/doc/Scheduler.tag" \
+    "$(TOPDIR)/Packets/doc/Packets.tag" \
+    "$(TOPDIR)/Socket/doc/Socket.tag" \
+    "$(TOPDIR)/Utils/doc/Utils.tag"
@@ -178,8 +178,12 @@ namespace module {
     protected:
         Module();

+#ifndef DOXYGEN
         template <class Source, class Target>
         Route<Source, Target> & route(Source & source, Target & target);
+#else
+        Route<connector::InputConnector, connector::OutputConnector> &
+        route(connector::InputConnector & input, connector::OutputConnector & output);
                                         ///< Define flow information
                                         /**< Using the route() and noroute() members, the
                                              information flow within the module is defined. Routing
@@ -200,11 +204,22 @@ namespace module {
                                              parameters like throttling parameters.

                                              \param[in] source Data source, object which controls
-                                                 incoming data
+                                                 incoming data (connector or event)
                                              \param[in] target Data target, object which controls
-                                                 outgoing data
+                                                 outgoing data (connector or event)
                                              \returns Route instance describing this route */
+
+        Route<connector::InputConnector, EventDescriptor> &
+        route(connector::InputConnector & input, EventDescriptor & output);
+                                        ///< Define flow information
+                                        /**< \see \ref route() */
+
+        Route<EventDescriptor, connector::OutputConnector> &
+        route(EventDescriptor & input, connector::OutputConnector & output);
+                                        ///< Define flow information
+                                        /**< \see \ref route() */
+#endif

         void noroute(connector::Connector & connector); ///< Define terminal connectors
                                         /**< The noroute() member explicitly declares, that a
                                              connector is terminal and does not directly
@@ -244,9 +259,7 @@ namespace module {
 #ifndef DOXYGEN
         virtual void macro_SENF_PPI_MODULE_missing() = 0;
-#endif
-#ifndef DOXYGEN
     private:
 #endif
         virtual void init(); ///< Called just before the network is run
...
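The route() overloads documented above are meant to be called from a module's constructor to declare how packets (and events) flow through the module. A minimal sketch of that usage follows; the module name, the PassiveInput/ActiveOutput connector types, the onRequest() registration and the include path are illustrative assumptions based on the declarations visible in this diff, not part of the commit:

    #include "PPI/PPI.hh"   // assumed umbrella header for the PPI library

    // Hypothetical pass-through module; only route() and SENF_PPI_MODULE are
    // taken from this commit, the connector API is assumed.
    class EchoModule
        : public senf::ppi::module::Module
    {
        SENF_PPI_MODULE(EchoModule);
    public:
        senf::ppi::connector::PassiveInput input;
        senf::ppi::connector::ActiveOutput output;

        EchoModule() {
            route(input, output);                    // declare flow (and throttling) from input to output
            input.onRequest(&EchoModule::request);   // illustrative event registration
        }

    private:
        void request() {
            output(input());                         // forward each incoming packet unchanged
        }
    };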
 @INCLUDE = "$(TOPDIR)/doclib/Doxyfile.global"
 PROJECT_NAME = DefaultBundle
-TAGFILES = "$(TOPDIR)/Utils/doc/Utils.tag" "$(TOPDIR)/Packets/doc/Packets.tag"
 GENERATE_TAGFILE = doc/DefaultBundle.tag
 INPUT = .
 ALPHABETICAL_INDEX = NO
+TAGFILES = \
+    "$(TOPDIR)/Packets/doc/Packets.tag" \
+    "$(TOPDIR)/Socket/doc/Socket.tag" \
+    "$(TOPDIR)/Utils/doc/Utils.tag"
 @INCLUDE = "$(TOPDIR)/doclib/Doxyfile.global"
 PROJECT_NAME = libPackets
-TAGFILES = "$(TOPDIR)/Utils/doc/Utils.tag"
 GENERATE_TAGFILE = doc/Packets.tag
 INPUT = .
 EXAMPLE_PATH = . DefaultBundle
+TAGFILES = \
+    "$(TOPDIR)/Socket/doc/Socket.tag" \
+    "$(TOPDIR)/Utils/doc/Utils.tag"
 @INCLUDE = "$(TOPDIR)/doclib/Doxyfile.global"
 PROJECT_NAME = MPEGDVBBundle
-TAGFILES = "$(TOPDIR)/Utils/doc/Utils.tag" "$(TOPDIR)/Packets/doc/Packets.tag"
 GENERATE_TAGFILE = doc/MPEGDVBBundle.tag
 INPUT = .
 ALPHABETICAL_INDEX = NO
+TAGFILES = \
+    "$(TOPDIR)/Packets/doc/Packets.tag" \
+    "$(TOPDIR)/Socket/doc/Socket.tag" \
+    "$(TOPDIR)/Utils/doc/Utils.tag"
 @INCLUDE = "$(TOPDIR)/doclib/Doxyfile.global"
 PROJECT_NAME = libScheduler
-TAGFILES = "$(TOPDIR)/Utils/doc/Utils.tag"
 GENERATE_TAGFILE = doc/Scheduler.tag
 ALPHABETICAL_INDEX = NO
+TAGFILES = \
+    "$(TOPDIR)/Socket/doc/Socket.tag" \
+    "$(TOPDIR)/Utils/doc/Utils.tag"
 @INCLUDE = "$(TOPDIR)/doclib/Doxyfile.global"
 PROJECT_NAME = libSocket
-TAGFILES = "$(TOPDIR)/Utils/doc/Utils.tag"
 GENERATE_TAGFILE = doc/Socket.tag
 RECURSIVE = Yes
 SHOW_DIRECTORIES = Yes
+TAGFILES = "$(TOPDIR)/Utils/doc/Utils.tag"
@@ -51,13 +51,14 @@ namespace senf {
         class only allows changing those fields which need to be changed. The other fields are
         read-only. They are filled by the operating system when receiving a packet

+        \nosubgrouping
      */
     class LLSocketAddress
     {
     public:
         /** \brief Valid pkttype() values

-            These are the possible values returned by arptype()
+            These are the possible values returned by pkttype()
          */
         enum PktType { Undefined = 0
                      , Host = PACKET_HOST /**< Packet destined for this host */
@@ -68,6 +69,10 @@ namespace senf {
                      , Outgoing = PACKET_OUTGOING /**< Packet sent out from this host */
         };

+        ///////////////////////////////////////////////////////////////////////////
+        ///\name Structors and default members
+        ///@{
+
         LLSocketAddress();              ///< Create empty address
         explicit LLSocketAddress(unsigned proto, std::string const & iface="");
                                         ///< Create address for \c bind()
@@ -90,7 +95,9 @@ namespace senf {
             \param addr Address to send data to
             \param iface Interface to send packet from */

-        void clear();                   ///< Clear the address
+        ///@}
+        ///////////////////////////////////////////////////////////////////////////
+
         unsigned protocol() const;      ///< Return address protocol (ethertype)
         std::string interface() const;  ///< Return interface name
...
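The LLSocketAddress documentation above describes an address for packet sockets: the caller sets protocol and interface for bind(), while fields such as pkttype() are read-only and filled in by the operating system on received packets. A small sketch of that usage; the include path, the PacketSocketHandle type and the ETH_P_ALL constant are assumptions from outside this commit:

    #include <linux/if_ether.h>                        // for ETH_P_ALL (assumed)
    #include "Socket/Protocols/Raw/LLAddressing.hh"    // assumed location of LLSocketAddress

    void bindExample()
    {
        // Create an address for bind(): protocol and interface are the writable fields.
        senf::LLSocketAddress addr(ETH_P_ALL, "eth0");

        senf::PacketSocketHandle sock;                 // hypothetical link-layer socket handle
        sock.bind(addr);

        // On received packets the OS fills the read-only fields; pkttype() then
        // reports how the packet was addressed (Host, Outgoing, ...).
        senf::LLSocketAddress peer;
        if (peer.pkttype() == senf::LLSocketAddress::Host)
            ; // packet was destined for this host
    }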
@@ -17,6 +17,7 @@ BUILTIN_STL_SUPPORT = YES
 EXTRACT_ALL = YES
 EXTRACT_PRIVATE = NO
 EXTRACT_STATIC = NO
+SUBGROUPING = NO
 HIDE_FRIEND_COMPOUNDS = YES
 INTERNAL_DOCS = YES
 SOURCE_BROWSER = YES
...
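With SUBGROUPING disabled globally, Doxygen no longer forms member groups automatically; groups have to be spelled out with \name blocks, as the LLSocketAddress change above does. A hypothetical header fragment (class and members invented for illustration) showing the pattern this relies on:

    class Example
    {
    public:
        ///\name Structors and default members
        ///@{
        Example();                      ///< Default constructor
        ~Example();                     ///< Destructor
        ///@}

        ///\name Accessors
        ///@{
        unsigned value() const;         ///< Return the stored value
        ///@}

    private:
        unsigned value_;
    };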
@@ -93,88 +93,177 @@
 #
 # You will find all this in the DoxyEmitter

-import os, sys, traceback
+import os, sys, traceback, string
 import os.path
 import glob, re
 import SCons.Action
 from fnmatch import fnmatch

-EnvVar = re.compile(r"\$\(([0-9A-Za-z_-]+)\)")
+class DoxyfileLexer:
+
+    def __init__(self,stream):
+        self._stream = stream
+        self._buffer = ""
+        self.lineno = 0
+        self._eof = False
+        self._fillbuffer()
+
+    VARIABLE_RE = re.compile("[@A-Z_]+")
+    OPERATOR_RE = re.compile("\\+?=")
+    VALUE_RE = re.compile("\\S+")
+
+    def _readline(self):
+        if self._eof:
+            self._buffer = ""
+            return
+        self._buffer = self._stream.readline()
+        if not self._buffer:
+            self._eof = True
+            return
+        self._buffer = self._buffer.strip()
+        self.lineno += 1
+
+    def _skip(self, nchars=0):
+        self._buffer = self._buffer[nchars:].strip()
+        while self._buffer[:1] == '\\' and not self.eof():
+            self._readline()
+        if self._buffer[:1] == '#':
+            self._buffer=""
+
+    def _fillbuffer(self):
+        while not self._buffer and not self.eof():
+            self._readline()
+            self._skip()
+
+    def _token(self, re, read=False):
+        if not self._buffer and read:
+            self._fillbuffer()
+        if not self._buffer:
+            return ""
+        m = re.match(self._buffer)
+        if m:
+            v = self._buffer[:m.end()]
+            self._skip(m.end())
+            return v
+        else:
+            raise ValueError,"Invalid input"
+
+    def var(self): return self._token(self.VARIABLE_RE, True)
+    def op(self): return self._token(self.OPERATOR_RE)
+
+    def next(self):
+        if not self._buffer:
+            raise StopIteration
+        if self._buffer[0] == '"':
+            return self._qstr()
+        m = self.VALUE_RE.match(self._buffer)
+        if m:
+            v = self._buffer[:m.end()]
+            self._skip(m.end())
+            return v
+        else:
+            raise ValueError
+
+    def __iter__(self):
+        return self
+
+    QSKIP_RE = re.compile("[^\\\"]+")
+
+    def _qstr(self):
+        self._buffer = self._buffer[1:]
+        v = ""
+        while self._buffer:
+            m = self.QSKIP_RE.match(self._buffer)
+            if m:
+                v += self._buffer[:m.end()]
+                self._buffer = self._buffer[m.end():]
+            if self._buffer[:1] == '"':
+                self._skip(1)
+                return v
+            if self._buffer[:1] == '\\' and len(self._buffer)>1:
+                v += self._buffer[1]
+                self._buffer = self._buffer[2:]
+            else:
+                raise ValueError,"Unexpected charachter in string"
+        raise ValueError,"Unterminated string"
+
+    def eof(self):
+        return self._eof
+
+class DoxyfileParser:
+
+    ENVVAR_RE = re.compile(r"\$\(([0-9A-Za-z_-]+)\)")
+
+    def __init__(self, path, env, include_path=None, items = None):
+        self._env = env
+        self._include_path = include_path or []
+        self._lexer = DoxyfileLexer(file(path))
+        self._dir = os.path.split(path)[0]
+        self._items = items or {}
+
+    def parse(self):
+        while True:
+            var = self._lexer.var()
+            if not var: break;
+            op = self._lexer.op()
+            value = [ self._envsub(v) for v in self._lexer ]
+            if not value:
+                raise ValueError,"Missing value in assignment"
+            if var[0] == '@':
+                self._meta(var,op,value)
+            elif op == '=':
+                self._items[var] = value
+            else:
+                self._items.setdefault(var,[]).extend(value)
+
+    def _envsub(self,value):
+        return self.ENVVAR_RE.sub(lambda m, env=self._env : str(env.get(m.group(1),"")), value)
+
+    def _meta(self, cmd, op, value):
+        m = '_'+cmd[1:]
+        try:
+            m = getattr(self,m)
+        except AttributeError:
+            raise ValueError,'Unknown meta command ' + cmd
+        m(op,value)
+
+    def _INCLUDE(self, op, value):
+        if len(value) != 1:
+            raise ValueError,"Invalid argument to @INCLUDE"
+        for d in [ self._dir ] + self._include_path:
+            p = os.path.join(d,value[0])
+            if os.path.exists(p):
+                self._items.setdefault('@INCLDUE',[]).append(p)
+                parser = DoxyfileParser(p, self._env, self._include_path, self._items)
+                parser.parse()
+                return
+        raise ValueError,"@INCLUDE file not found"
+
+    def _INCLUDE_PATH(self, op, value):
+        self._include_path.extend(value)
+
+    def items(self):
+        return self._items
+
 def DoxyfileParse(env,file):
     ENV = {}
     ENV.update(env.get("ENV",{}))
     ENV['TOPDIR'] = env.Dir('#').abspath
-    data = DoxyfileParse_(file,{},ENV)
+    parser = DoxyfileParser(file,ENV)
+    try:
+        parser.parse()
+    except ValueError, v:
+        print "WARNING: Error while parsing doxygen configuration '%s': %s" % (str(file),str(v))
+        return {}
+    data = parser.items()
     for k,v in data.items():
         if not v : del data[k]
         elif k in ("INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS", "@INCLUDE", "TAGFILES") : continue
         elif len(v)==1 : data[k] = v[0]
     return data
-def DoxyfileParse_(file, data, ENV):
-    """
-    Parse a Doxygen source file and return a dictionary of all the values.
-    Values will be strings and lists of strings.
-    """
-    try:
-        dir = os.path.dirname(file)
-
-        import shlex
-        lex = shlex.shlex(instream=open(file), posix=True)
-        lex.wordchars += "*+=./-:@~$()"
-        lex.whitespace = lex.whitespace.replace("\n", "")
-        lex.escape = "\\"
-
-        lineno = lex.lineno
-        token = lex.get_token()
-        key = None
-        last_token = ""
-        key_token = True
-        next_key = False
-        new_data = True
-
-        def append_data(data, key, new_data, token):
-            if new_data or len(data[key]) == 0:
-                data[key].append(token)
-            else:
-                data[key][-1] += token
-
-        while token:
-            if token=='\n':
-                if last_token!='\\':
-                    key_token = True
-            elif token=='\\':
-                pass
-            elif key_token:
-                key = token
-                key_token = False
-            else:
-                if token=="+=" or (token=="=" and key=="@INCLUDE"):
-                    if not data.has_key(key):
-                        data[key] = []
-                elif token == "=":
-                    data[key] = []
-                else:
-                    token = EnvVar.sub(lambda m,ENV=ENV: str(ENV.get(m.group(1),"")),token)
-                    append_data(data, key, new_data, token)
-                    new_data = True
-                    if key=='@INCLUDE':
-                        inc = os.path.join(dir,data['@INCLUDE'][-1])
-                        if os.path.exists(inc) :
-                            DoxyfileParse_(inc,data,ENV)
-
-            last_token = token
-            token = lex.get_token()
-
-            if last_token=='\\' and token!='\n':
-                new_data = False
-                append_data(data, key, new_data, '\\')
-
-        return data
-
-    except:
-        return {}
 def DoxySourceScan(node, env, path):
     """
     Doxygen Doxyfile source scanner. This should scan the Doxygen file and add
...