[apparmor] [patch 07/12] Move nodeset caching into expr-tree.h

Seth Arnold seth.arnold at canonical.com
Thu Aug 21 06:45:01 UTC 2014


On Fri, Aug 15, 2014 at 12:20:42PM -0700, john.johansen at canonical.com wrote:
> We need to rework permission type mapping to nodesets, which means we
> need to move the nodeset computations earlier in the dfa creation
> process, instead of as a post step after follow(), so move the nodeset
> caching code into expr-tree.h.
> 
> Signed-off-by: John Johansen <john.johansen at canonical.com>

I asked some questions inline, but since this patch is a straight code
move and didn't introduce any of the behaviour I'm curious about:

Acked-by: Seth Arnold <seth.arnold at canonical.com>

Thanks

> 
> ---
>  parser/libapparmor_re/expr-tree.h |  176 ++++++++++++++++++++++++++++++++++++++
>  parser/libapparmor_re/hfa.h       |  175 -------------------------------------
>  2 files changed, 176 insertions(+), 175 deletions(-)
> 
> --- 2.9-test.orig/parser/libapparmor_re/expr-tree.h
> +++ 2.9-test/parser/libapparmor_re/expr-tree.h
> @@ -607,4 +607,180 @@
>  	DenyMatchFlag(uint32_t flag, uint32_t quiet): MatchFlag(flag, quiet) {}
>  };
>  
> +
> +/*
> + * hashedNodes - for efficient set comparison
> + */
> +class hashedNodeSet {
> +public:
> +	unsigned long hash;
> +	NodeSet *nodes;
> +
> +	hashedNodeSet(NodeSet *n): nodes(n)
> +	{
> +		hash = hash_NodeSet(n);
> +	}
> +
> +	bool operator<(hashedNodeSet const &rhs)const
> +	{
> +		if (hash == rhs.hash) {
> +			if (nodes->size() == rhs.nodes->size())
> +				return *nodes < *(rhs.nodes);
> +			else
> +				return nodes->size() < rhs.nodes->size();
> +		} else {
> +			return hash < rhs.hash;
> +		}
> +	}
> +};
> +
> +
> +class hashedNodeVec {
> +public:
> +	typedef ImportantNode ** iterator;
> +	iterator begin() { return nodes; }
> +	iterator end() { iterator t = nodes ? &nodes[len] : NULL; return t; }
> +
> +	unsigned long hash;
> +	unsigned long len;
> +	ImportantNode **nodes;
> +
> +	hashedNodeVec(NodeSet *n)
> +	{
> +		hash = hash_NodeSet(n);
> +		len = n->size();
> +		nodes = new ImportantNode *[n->size()];
> +
> +		unsigned int j = 0;
> +		for (NodeSet::iterator i = n->begin(); i != n->end(); i++, j++) {
> +			nodes[j] = *i;
> +		}
> +	}
> +
> +	hashedNodeVec(NodeSet *n, unsigned long h): hash(h)
> +	{
> +		len = n->size();
> +		nodes = new ImportantNode *[n->size()];
> +		ImportantNode **j = nodes;
> +		for (NodeSet::iterator i = n->begin(); i != n->end(); i++) {
> +			*(j++) = *i;
> +		}
> +	}
> +
> +	~hashedNodeVec()
> +	{
> +		delete nodes;
> +	}
> +
> +	unsigned long size()const { return len; }
> +
> +	bool operator<(hashedNodeVec const &rhs)const
> +	{
> +		if (hash == rhs.hash) {
> +			if (len == rhs.size()) {
> +				for (unsigned int i = 0; i < len; i++) {
> +					if (nodes[i] != rhs.nodes[i])
> +						return nodes[i] < rhs.nodes[i];
> +				}
> +				return false;
> +			}
> +			return len < rhs.size();
> +		}
> +		return hash < rhs.hash;
> +	}
> +};
> +
> +class CacheStats {
> +public:
> +	unsigned long dup, sum, max;
> +
> +	CacheStats(void): dup(0), sum(0), max(0) { };
> +
> +	void clear(void) { dup = sum = max = 0; }
> +	virtual unsigned long size(void) const = 0;
> +};
> +
> +class NodeCache: public CacheStats {
> +public:
> +	set<hashedNodeSet> cache;
> +
> +	NodeCache(void): cache() { };
> +	~NodeCache() { clear(); };
> +
> +	virtual unsigned long size(void) const { return cache.size(); }
> +
> +	void clear()
> +	{
> +		for (set<hashedNodeSet>::iterator i = cache.begin();
> +		     i != cache.end(); i++) {
> +			delete i->nodes;
> +		}
> +		cache.clear();
> +		CacheStats::clear();
> +	}
> +
> +	NodeSet *insert(NodeSet *nodes)
> +	{
> +		if (!nodes)
> +			return NULL;
> +		pair<set<hashedNodeSet>::iterator,bool> uniq;
> +		uniq = cache.insert(hashedNodeSet(nodes));

Does this make two hashedNodeSet objects, one on the stack and then one
in the 'cache' set<>? Or does this copy the stack object into the set<>?
Would a shared_ptr or new here create only one?
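
For reference, here's a minimal standalone sketch of what I think happens
(Probe and the copies counter are names I made up, just to illustrate):
insert() of a temporary constructs the object once on the stack and then
copy-constructs a second one inside the set's node, so two objects exist
briefly.

#include <set>
#include <iostream>

static int copies = 0;

struct Probe {
	unsigned long hash;
	Probe(unsigned long h): hash(h) { }
	Probe(const Probe &o): hash(o.hash) { copies++; }
	bool operator<(const Probe &rhs) const { return hash < rhs.hash; }
};

int main()
{
	std::set<Probe> cache;
	cache.insert(Probe(42));	/* temporary, plus one copy into the set */
	std::cout << "copies: " << copies << "\n";	/* prints 1 */
	return 0;
}

The copy is cheap here since hashedNodeSet is just a hash and a pointer,
so it probably doesn't matter; storing pointers with a dereferencing
comparator (as NodeVecCache does below) would create only the one heap
object, at the cost of the allocation and the indirection.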

> +		if (uniq.second == false) {
> +			delete(nodes);
> +			dup++;

Is it expected behaviour for NodeCache::insert() to delete its argument
when it turns out to be a duplicate?
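
My reading is that it's NodeCache::insert() (not the underlying
set::insert) that takes ownership of the passed-in NodeSet and hands back
the canonical cached copy, i.e. roughly this contract from the caller's
side (sketch only, make_some_nodeset() is a made-up helper):

NodeSet *make_some_nodeset();	/* hypothetical producer */

void example(NodeCache &cache)
{
	NodeSet *mine = make_some_nodeset();
	/* After this call 'mine' must not be used again: if an equal set
	 * was already cached, insert() freed 'mine' and returned the
	 * cached copy; otherwise the cache now owns it. */
	NodeSet *canonical = cache.insert(mine);
	(void)canonical;
}

If that's the intent, a comment above insert() spelling out the ownership
transfer would help.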

> +		} else {
> +			sum += nodes->size();
> +			if (nodes->size() > max)
> +				max = nodes->size();
> +		}
> +		return uniq.first->nodes;
> +	}
> +};
> +
> +struct deref_less_than {
> +       bool operator()(hashedNodeVec * const &lhs, hashedNodeVec * const &rhs)const
> +		{
> +			return *lhs < *rhs;
> +		}
> +};

There's funny whitespace all over this little class -- the 'const' at the
end of the prototype is missing a space and I'm pretty sure the braces are
in the wrong column.
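
Something like this, maybe (just reindented, no behaviour change
intended):

struct deref_less_than {
	bool operator()(hashedNodeVec * const &lhs,
			hashedNodeVec * const &rhs) const
	{
		return *lhs < *rhs;
	}
};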

> +
> +class NodeVecCache: public CacheStats {
> +public:
> +	set<hashedNodeVec *, deref_less_than> cache;
> +
> +	NodeVecCache(void): cache() { };
> +	~NodeVecCache() { clear(); };
> +
> +	virtual unsigned long size(void) const { return cache.size(); }
> +
> +	void clear()
> +	{
> +		for (set<hashedNodeVec *>::iterator i = cache.begin();
> +		     i != cache.end(); i++) {
> +			delete *i;
> +		}
> +		cache.clear();
> +		CacheStats::clear();
> +	}
> +
> +	hashedNodeVec *insert(NodeSet *nodes)
> +	{
> +		if (!nodes)
> +			return NULL;
> +		pair<set<hashedNodeVec *>::iterator,bool> uniq;
> +		hashedNodeVec *nv = new hashedNodeVec(nodes);
> +		uniq = cache.insert(nv);
> +		if (uniq.second == false) {
> +			delete nv;
> +			dup++;
> +		} else {
> +			sum += nodes->size();
> +			if (nodes->size() > max)
> +				max = nodes->size();
> +		}
> +		delete(nodes);
> +		return (*uniq.first);
> +	}
> +};
> +
>  #endif /* __LIBAA_RE_EXPR */
> --- 2.9-test.orig/parser/libapparmor_re/hfa.h
> +++ 2.9-test/parser/libapparmor_re/hfa.h
> @@ -131,181 +131,6 @@
>  int accept_perms(NodeSet *state, perms_t &perms);
>  
>  /*
> - * hashedNodes - for efficient set comparison
> - */
> -class hashedNodeSet {
> -public:
> -	unsigned long hash;
> -	NodeSet *nodes;
> -
> -	hashedNodeSet(NodeSet *n): nodes(n)
> -	{
> -		hash = hash_NodeSet(n);
> -	}
> -
> -	bool operator<(hashedNodeSet const &rhs)const
> -	{
> -		if (hash == rhs.hash) {
> -			if (nodes->size() == rhs.nodes->size())
> -				return *nodes < *(rhs.nodes);
> -			else
> -				return nodes->size() < rhs.nodes->size();
> -		} else {
> -			return hash < rhs.hash;
> -		}
> -	}
> -};
> -
> -
> -class hashedNodeVec {
> -public:
> -	typedef ImportantNode ** iterator;
> -	iterator begin() { return nodes; }
> -	iterator end() { iterator t = nodes ? &nodes[len] : NULL; return t; }
> -
> -	unsigned long hash;
> -	unsigned long len;
> -	ImportantNode **nodes;
> -
> -	hashedNodeVec(NodeSet *n)
> -	{
> -		hash = hash_NodeSet(n);
> -		len = n->size();
> -		nodes = new ImportantNode *[n->size()];
> -
> -		unsigned int j = 0;
> -		for (NodeSet::iterator i = n->begin(); i != n->end(); i++, j++) {
> -			nodes[j] = *i;
> -		}
> -	}
> -
> -	hashedNodeVec(NodeSet *n, unsigned long h): hash(h)
> -	{
> -		len = n->size();
> -		nodes = new ImportantNode *[n->size()];
> -		ImportantNode **j = nodes;
> -		for (NodeSet::iterator i = n->begin(); i != n->end(); i++) {
> -			*(j++) = *i;
> -		}
> -	}
> -
> -	~hashedNodeVec()
> -	{
> -		delete nodes;
> -	}
> -
> -	unsigned long size()const { return len; }
> -
> -	bool operator<(hashedNodeVec const &rhs)const
> -	{
> -		if (hash == rhs.hash) {
> -			if (len == rhs.size()) {
> -				for (unsigned int i = 0; i < len; i++) {
> -					if (nodes[i] != rhs.nodes[i])
> -						return nodes[i] < rhs.nodes[i];
> -				}
> -				return false;
> -			}
> -			return len < rhs.size();
> -		}
> -		return hash < rhs.hash;
> -	}
> -};
> -
> -class CacheStats {
> -public:
> -	unsigned long dup, sum, max;
> -
> -	CacheStats(void): dup(0), sum(0), max(0) { };
> -
> -	void clear(void) { dup = sum = max = 0; }
> -	virtual unsigned long size(void) const = 0;
> -};
> -
> -class NodeCache: public CacheStats {
> -public:
> -	set<hashedNodeSet> cache;
> -
> -	NodeCache(void): cache() { };
> -	~NodeCache() { clear(); };
> -
> -	virtual unsigned long size(void) const { return cache.size(); }
> -
> -	void clear()
> -	{
> -		for (set<hashedNodeSet>::iterator i = cache.begin();
> -		     i != cache.end(); i++) {
> -			delete i->nodes;
> -		}
> -		cache.clear();
> -		CacheStats::clear();
> -	}
> -
> -	NodeSet *insert(NodeSet *nodes)
> -	{
> -		if (!nodes)
> -			return NULL;
> -		pair<set<hashedNodeSet>::iterator,bool> uniq;
> -		uniq = cache.insert(hashedNodeSet(nodes));
> -		if (uniq.second == false) {
> -			delete(nodes);
> -			dup++;
> -		} else {
> -			sum += nodes->size();
> -			if (nodes->size() > max)
> -				max = nodes->size();
> -		}
> -		return uniq.first->nodes;
> -	}
> -};
> -
> -struct deref_less_than {
> -       bool operator()(hashedNodeVec * const &lhs, hashedNodeVec * const &rhs)const
> -		{
> -			return *lhs < *rhs;
> -		}
> -};
> -
> -class NodeVecCache: public CacheStats {
> -public:
> -	set<hashedNodeVec *, deref_less_than> cache;
> -
> -	NodeVecCache(void): cache() { };
> -	~NodeVecCache() { clear(); };
> -
> -	virtual unsigned long size(void) const { return cache.size(); }
> -
> -	void clear()
> -	{
> -		for (set<hashedNodeVec *>::iterator i = cache.begin();
> -		     i != cache.end(); i++) {
> -			delete *i;
> -		}
> -		cache.clear();
> -		CacheStats::clear();
> -	}
> -
> -	hashedNodeVec *insert(NodeSet *nodes)
> -	{
> -		if (!nodes)
> -			return NULL;
> -		pair<set<hashedNodeVec *>::iterator,bool> uniq;
> -		hashedNodeVec *nv = new hashedNodeVec(nodes);
> -		uniq = cache.insert(nv);
> -		if (uniq.second == false) {
> -			delete nv;
> -			dup++;
> -		} else {
> -			sum += nodes->size();
> -			if (nodes->size() > max)
> -				max = nodes->size();
> -		}
> -		delete(nodes);
> -		return (*uniq.first);
> -	}
> -};
> -
> -/*
>   * ProtoState - NodeSet and ancillery information used to create a state
>   */
>  class ProtoState {
> 
> 