package net.deterlab.abac;

import org.apache.commons.collections15.*;
import org.apache.commons.collections15.functors.*;

import edu.uci.ics.jung.graph.*;
import edu.uci.ics.jung.graph.event.*;
import edu.uci.ics.jung.graph.util.*;

import java.awt.geom.Point2D;

import java.io.*;
import java.util.*;
import java.util.zip.*;

import java.security.*;
import java.security.cert.*;

import org.bouncycastle.x509.*;
import org.bouncycastle.x509.util.*;
import org.bouncycastle.openssl.*;

/**
 * Represents a global graph of credentials in the form of principals and
 * attributes.
 */
public class Context {
    /** Return codes for the load methods. */
    static final int ABAC_CERT_SUCCESS = 0;
    static final int ABAC_CERT_INVALID = -1;
    static final int ABAC_CERT_BAD_SIG = -2;
    static final int ABAC_CERT_MISSING_ISSUER = -3;

    /** The graph of roles (vertices) and credentials (edges). */
    protected Graph<Role, Credential> g;
    /** Edges added by the RT0 derivation rules, not by certificates. */
    protected Set<Credential> derived_edges;
    /** A Query object used internally to derive implied edges. */
    protected Query pq;
    /** True when the graph has changed and implied edges must be redone. */
    protected boolean dirty;
    /** The identities known to this Context, used to validate credentials. */
    protected Set<Identity> identities;
    /** Maps mnemonic nicknames to keyIDs. */
    protected Map<String, String> nicknames;
    /** Maps keyIDs to mnemonic nicknames. */
    protected Map<String, String> keys;

    /**
     * Create an empty Context.
     */
    public Context() {
        /* create the graph */
        g = Graphs.synchronizedDirectedGraph(
                new DirectedSparseGraph<Role, Credential>());
        derived_edges = new HashSet<Credential>();
        pq = new Query(g);
        dirty = false;
        identities = new TreeSet<Identity>();
        nicknames = new TreeMap<String, String>();
        keys = new TreeMap<String, String>();
    }

    /**
     * Create a Context with the same identities and credentials as c.
     */
    public Context(Context c) {
        this();
        for (Identity i : c.identities)
            loadIDChunk(i);
        for (Credential cr : c.credentials())
            loadAttributeChunk(cr);
        derive_implied_edges();
    }

    /**
     * Load an identity certificate from the named file.
     */
    public int loadIDFile(String fn) { return loadIDFile(new File(fn)); }

    /**
     * Load an identity certificate from a file.
     */
    public int loadIDFile(File fn) {
        try {
            addIdentity(new Identity(fn));
        } catch (SignatureException sig) {
            return ABAC_CERT_BAD_SIG;
        } catch (Exception e) {
            return ABAC_CERT_INVALID;
        }
        return ABAC_CERT_SUCCESS;
    }

    /**
     * Load an identity from an Identity, String, File, or X509Certificate.
     */
    public int loadIDChunk(Object c) {
        try {
            if (c instanceof Identity)
                addIdentity((Identity) c);
            else if (c instanceof String)
                addIdentity(new Identity((String) c));
            else if (c instanceof File)
                addIdentity(new Identity((File) c));
            else if (c instanceof X509Certificate)
                addIdentity(new Identity((X509Certificate) c));
            else
                return ABAC_CERT_INVALID;
        } catch (SignatureException sig) {
            return ABAC_CERT_BAD_SIG;
        } catch (Exception e) {
            return ABAC_CERT_INVALID;
        }
        return ABAC_CERT_SUCCESS;
    }

    /**
     * Load an attribute certificate from the named file.
     */
    public int loadAttributeFile(String fn) {
        return loadAttributeFile(new File(fn));
    }

    /**
     * Load an attribute certificate from a file.
     */
    public int loadAttributeFile(File fn) {
        try {
            add_credential(new Credential(fn, identities));
        } catch (InvalidKeyException sig) {
            return ABAC_CERT_MISSING_ISSUER;
        } catch (Exception e) {
            return ABAC_CERT_INVALID;
        }
        return ABAC_CERT_SUCCESS;
    }

    /**
     * Load an attribute from a Credential, String, File, or
     * X509V2AttributeCertificate.
     */
    public int loadAttributeChunk(Object c) {
        try {
            if (c instanceof Credential)
                add_credential((Credential) c);
            else if (c instanceof String)
                add_credential(new Credential((String) c, identities));
            else if (c instanceof File)
                add_credential(new Credential((File) c, identities));
            else if (c instanceof X509V2AttributeCertificate)
                add_credential(new Credential(
                            (X509V2AttributeCertificate) c, identities));
            else
                return ABAC_CERT_INVALID;
        } catch (SignatureException sig) {
            return ABAC_CERT_BAD_SIG;
        } catch (Exception e) {
            return ABAC_CERT_INVALID;
        }
        return ABAC_CERT_SUCCESS;
    }

    /**
     * Determine whether principal has the given role, returning the edges
     * of the proof graph (empty if the query fails).
     */
    public Collection<Credential> query(String role, String principal) {
        derive_implied_edges();

        Query q = new Query(g);
        Graph<Role, Credential> rg = q.run(role, principal);

        return rg.getEdges();
    }

    /**
     * Returns a collection of the credentials in the graph.
     */
    public Collection<Credential> credentials() {
        Collection<Credential> creds = new HashSet<Credential>();

        // only return creds with a cert: all others are derived edges
        for (Credential cred : g.getEdges())
            if (cred.cert() != null)
                creds.add(cred);

        return creds;
    }
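    /*
     * Minimal usage sketch of the loading and query API above.  The file
     * names "issuer.pem" and "attr.der" are hypothetical, and "A.friend"
     * and "B" stand in for real keyID-based role and principal strings:
     *
     *     Context ctxt = new Context();
     *     if (ctxt.loadIDFile("issuer.pem") != ABAC_CERT_SUCCESS)
     *         System.err.println("could not load identity");
     *     if (ctxt.loadAttributeFile("attr.der") != ABAC_CERT_SUCCESS)
     *         System.err.println("could not load credential");
     *     // does principal "B" have role "A.friend"?
     *     Collection<Credential> proof = ctxt.query("A.friend", "B");
     */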
    /**
     * Returns a Query object which can be used to query the graph.  This
     * object becomes invalid if the graph is modified.
     */
    public Query querier() {
        derive_implied_edges();
        return new Query(g);
    }

    /**
     * Add a credential to the graph.
     */
    public void add_credential(Credential cred) {
        Role tail = cred.tail();
        Role head = cred.head();

        /* explicitly add the vertices, otherwise get a null pointer
         * exception */
        if (!g.containsVertex(head))
            g.addVertex(head);
        if (!g.containsVertex(tail))
            g.addVertex(tail);

        if (!g.containsEdge(cred))
            g.addEdge(cred, tail, head);

        // add the prereqs of an intersection to the graph
        if (tail.is_intersection())
            for (Role prereq : tail.prereqs())
                g.addVertex(prereq);

        dirty = true;
    }

    /**
     * Remove a credential from the graph.
     */
    public void remove_credential(Credential cred) {
        if (g.containsEdge(cred))
            g.removeEdge(cred);
        dirty = true;
    }

    /**
     * Add a role without an edge.
     */
    public void add_vertex(Role v) {
        if (!g.containsVertex(v)) {
            g.addVertex(v);
            dirty = true;
        }
    }

    /**
     * Remove a role and any edges incident on it.
     */
    public void remove_vertex(Role v) {
        if (g.containsVertex(v)) {
            g.removeVertex(v);
            dirty = true;
        }
    }

    /**
     * Returns the roles (vertices) currently in the graph.
     */
    public Collection<Role> roles() {
        return g.getVertices();
    }

    /**
     * Derive the implied edges in the graph, according to RT0 derivation
     * rules.  They are added to this graph.  See "Distributed Credential
     * Chain Discovery in Trust Management" by Ninghui Li et al. for
     * details.  Note that a derived linking edge can imply a new
     * intersection edge and vice versa, so we iteratively derive edges,
     * giving up when an iteration produces 0 new edges.
     */
    protected synchronized void derive_implied_edges() {
        // nothing to do on a clean graph
        if (!dirty)
            return;

        clear_old_edges();

        // iteratively derive links.  continue as long as new links are added
        while (derive_links_iter() > 0)
            ;
        dirty = false;
    }

    /**
     * Single iteration of deriving implied edges.  Returns the number of
     * new links added.
     */
    protected int derive_links_iter() {
        int count = 0;

        /* for every node in the graph.. */
        for (Role vertex : g.getVertices()) {
            if (vertex.is_intersection()) {
                // for each prereq edge:
                //     find the set of principals that have the prereq
                // find the intersection of all those sets (i.e., the
                //     principals that satisfy every prereq)
                // for each principal in the intersection:
                //     add a derived edge
                Set<Role> principals = null;

                for (Role prereq : vertex.prereqs()) {
                    Set<Role> cur_principals = pq.find_principals(prereq);

                    if (principals == null)
                        principals = cur_principals;
                    else
                        // no, they couldn't just call it "intersection"
                        principals.retainAll(cur_principals);

                    if (principals.size() == 0)
                        break;
                }

                // add 'em
                for (Role principal : principals)
                    if (add_derived_edge(vertex, principal))
                        ++count;
            } else if (vertex.is_linking()) {
                // make the rest of the code a bit clearer
                Role A_r1_r2 = vertex;

                Role A_r1 = new Role(A_r1_r2.A_r1());
                String r2 = A_r1_r2.r2();

                /* locate the node A.r1 */
                if (!g.containsVertex(A_r1))
                    continue; /* boring: nothing of the form A.r1 */

                /* for each B that satisfies A.r1 */
                for (Role principal : pq.find_principals(A_r1)) {
                    Role B_r2 = new Role(principal + "." + r2);
                    if (!g.containsVertex(B_r2))
                        continue;

                    if (add_derived_edge(A_r1_r2, B_r2))
                        ++count;
                }
            }
        }

        return count;
    }

    /**
     * Add a derived edge to the graph.  Returns true only if the edge did
     * not already exist.
     */
    protected boolean add_derived_edge(Role head, Role tail) {
        // edge exists: return false
        if (g.findEdge(tail, head) != null)
            return false;

        // add the new edge
        Credential derived_edge = new Credential(head, tail);
        derived_edges.add(derived_edge);
        g.addEdge(derived_edge, tail, head);
        return true;
    }
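    /*
     * Worked example of the linking-role rule above (the names A, B, C,
     * r1, r2 are illustrative; real vertices are keyID.role strings).
     * Given the credentials
     *
     *     A.r1 <- B          (edge B -> A.r1: B satisfies A.r1)
     *     B.r2 <- C          (so the vertex B.r2 exists)
     *
     * and a linking role A.r1.r2 in the graph, derive_links_iter adds the
     * derived edge B.r2 -> A.r1.r2: whatever B places in r2 also
     * satisfies A.r1.r2.
     */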
    /**
     * Clear the derived edges that currently exist in the graph.  This is
     * done before the edges are rederived.  The derived edges in filtered
     * graphs are also cleared.
     */
    protected void clear_old_edges() {
        for (Credential i : derived_edges)
            g.removeEdge(i);
        derived_edges = new HashSet<Credential>();
    }

    /**
     * Put the Identity into the set of ids used to validate certificates.
     * Also put the keyID and name into the translation mappings used by
     * Roles to pretty print.  In the role mapping, if multiple ids use the
     * same common name they are disambiguated.  Only one entry per keyID
     * is allowed.
     */
    protected void addIdentity(Identity id) {
        identities.add(id);
        if (id.getName() != null && id.getKeyID() != null) {
            if (!keys.containsKey(id.getKeyID())) {
                String name = id.getName();
                int n = 1;

                while (nicknames.containsKey(name)) {
                    name = id.getName() + n++;
                }
                nicknames.put(name, id.getKeyID());
                keys.put(id.getKeyID(), name);
            }
        }
    }

    /**
     * Translate either keys to nicknames or vice versa.  Break the string
     * into space-separated tokens and then each of them into
     * period-separated strings.  If any of the smallest strings is in the
     * map, replace it with the value.
     */
    protected String replace(String is, Map<String, String> m) {
        String rv = "";
        for (String tok : is.split(" ")) {
            String term = "";
            for (String s : tok.split("\\.")) {
                String next = m.containsKey(s) ? m.get(s) : s;

                if (term.isEmpty())
                    term = next;
                else
                    term += "." + next;
            }
            if (rv.isEmpty())
                rv = term;
            else
                rv += " " + term;
        }
        return rv;
    }

    /**
     * Replace keyIDs in s with their mnemonic nicknames; keys is the map
     * keyed by keyID, so it is the one to use here.
     */
    public String expandKeyID(String s) { return replace(s, keys); }

    /**
     * Replace mnemonic nicknames in s with the keyIDs they stand for.
     */
    public String expandNickname(String s) { return replace(s, nicknames); }
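    /*
     * Translation sketch ("ab12cd" stands in for a real keyID hash
     * string): after addIdentity stores an identity with common name
     * "acme" and keyID "ab12cd", the maps hold
     * nicknames["acme"] = "ab12cd" and keys["ab12cd"] = "acme", so
     *
     *     expandKeyID("ab12cd.partner")   returns "acme.partner"
     *     expandNickname("acme.partner")  returns "ab12cd.partner"
     *
     * A second identity named "acme" would be stored as "acme1", and so on.
     */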
    /**
     * Import a zip file.  First import all the identities (pem), then the
     * credentials (der) into the credential graph, then any alias files
     * into the two maps.  If keys is not null, any key pairs in PEM files
     * are put in there.  If errors is not null, errors reading files are
     * added indexed by filename.
     */
    public void readZipFile(File zf, Collection<KeyPair> keys,
            Map<String, Exception> errors) throws IOException {
        Vector<ZipEntry> derEntries = new Vector<ZipEntry>();
        Map<String, Identity> ids = new TreeMap<String, Identity>();
        Map<String, KeyPair> kps = new TreeMap<String, KeyPair>();

        ZipFile z = new ZipFile(zf);

        for (Enumeration<? extends ZipEntry> ze = z.entries();
                ze.hasMoreElements();) {
            ZipEntry f = ze.nextElement();
            try {
                PEMReader r = new PEMReader(
                        new InputStreamReader(z.getInputStream(f)));
                Object o = readPEM(r);

                if (o != null) {
                    if (o instanceof Identity) {
                        Identity i = (Identity) o;
                        String kid = i.getKeyID();

                        // attach a previously seen key pair, or remember
                        // the identity until its key pair shows up
                        if (kps.containsKey(kid)) {
                            i.setKeyPair(kps.get(kid));
                            kps.remove(kid);
                        } else if (i.getKeyPair() == null)
                            ids.put(i.getKeyID(), i);

                        loadIDChunk(i);
                    } else if (o instanceof KeyPair) {
                        KeyPair kp = (KeyPair) o;
                        String kid = Identity.extractKeyID(kp.getPublic());

                        if (ids.containsKey(kid)) {
                            Identity i = ids.get(kid);

                            i.setKeyPair(kp);
                            ids.remove(kid);
                        } else {
                            kps.put(kid, kp);
                        }
                    }
                } else {
                    // Not a PEM file; try it later as a DER credential
                    derEntries.add(f);
                    continue;
                }
            } catch (Exception e) {
                if (errors != null)
                    errors.put(f.getName(), e);
            }
        }

        for (ZipEntry f : derEntries) {
            try {
                add_credential(new Credential(z.getInputStream(f),
                            identities));
            } catch (Exception e) {
                if (errors != null)
                    errors.put(f.getName(), e);
            }
        }
    }

    /**
     * Import a zip file, ignoring key pairs and errors.
     */
    public void readZipFile(File d) throws IOException {
        readZipFile(d, null, null);
    }

    /**
     * Import a zip file, collecting errors.
     */
    public void readZipFile(File d, Map<String, Exception> errors)
            throws IOException {
        readZipFile(d, null, errors);
    }

    /**
     * Import a zip file, collecting key pairs.
     */
    public void readZipFile(File d, Collection<KeyPair> keys)
            throws IOException {
        readZipFile(d, keys, null);
    }

    /**
     * Read the contents of a PEM stream: an Identity (with its key pair if
     * one is present) or a bare KeyPair.  Returns null if the stream holds
     * neither.
     */
    protected Object readPEM(PEMReader r) throws IOException {
        Identity i = null;
        KeyPair keys = null;
        Object o = null;

        while ((o = r.readObject()) != null) {
            if (o instanceof X509Certificate) {
                if (i == null) {
                    try {
                        i = new Identity((X509Certificate) o);
                    } catch (Exception e) {
                        // Translate Identity exceptions to IOException
                        throw new IOException(e);
                    }
                    if (keys != null) {
                        i.setKeyPair(keys);
                        keys = null;
                    }
                } else {
                    throw new IOException("Two certificates");
                }
            } else if (o instanceof KeyPair) {
                if (i != null)
                    i.setKeyPair((KeyPair) o);
                else
                    keys = (KeyPair) o;
            } else {
                throw new IOException("Unexpected PEM object: " +
                        o.getClass().getName());
            }
        }

        if (i != null)
            return i;
        else if (keys != null)
            return keys;
        else
            return null;
    }
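    /*
     * Import sketch (the archive name "creds.zip" is hypothetical): load a
     * zip of PEM identities and DER credentials, reporting per-entry
     * failures rather than aborting the whole import.
     *
     *     Map<String, Exception> errs = new TreeMap<String, Exception>();
     *     ctxt.readZipFile(new File("creds.zip"), errs);
     *     for (Map.Entry<String, Exception> e : errs.entrySet())
     *         System.err.println(e.getKey() + ": " + e.getValue());
     */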
    /**
     * Import a directory full of files.  First import all the identities
     * (pem), then the credentials (der) into the credential graph, then
     * any alias files into the two maps.  If keys is not null, any key
     * pairs in PEM files are put in there.  If errors is not null, errors
     * reading files are added indexed by filename.
     */
    public void readDirectory(File d, Collection<KeyPair> keys,
            Map<String, Exception> errors) {
        Vector<File> derFiles = new Vector<File>();
        Collection<File> files = new Vector<File>();
        Map<String, Identity> ids = new TreeMap<String, Identity>();
        Map<String, KeyPair> kps = new TreeMap<String, KeyPair>();

        // a single file is imported as if it were a directory holding it
        if (d.isDirectory())
            for (File f : d.listFiles())
                files.add(f);
        else
            files.add(d);

        for (File f : files) {
            try {
                PEMReader r = new PEMReader(new FileReader(f));
                Object o = readPEM(r);

                if (o != null) {
                    if (o instanceof Identity) {
                        Identity i = (Identity) o;
                        String kid = i.getKeyID();

                        if (kps.containsKey(kid)) {
                            i.setKeyPair(kps.get(kid));
                            kps.remove(kid);
                        } else if (i.getKeyPair() == null)
                            ids.put(i.getKeyID(), i);

                        loadIDChunk(i);
                    } else if (o instanceof KeyPair) {
                        KeyPair kp = (KeyPair) o;
                        String kid = Identity.extractKeyID(kp.getPublic());

                        if (ids.containsKey(kid)) {
                            Identity i = ids.get(kid);

                            i.setKeyPair(kp);
                            ids.remove(kid);
                        } else {
                            kps.put(kid, kp);
                        }
                    }
                } else {
                    // Not a PEM file; try it later as a DER credential
                    derFiles.add(f);
                    continue;
                }
            } catch (Exception e) {
                if (errors != null)
                    errors.put(f.getName(), e);
            }
        }

        for (File f : derFiles) {
            try {
                add_credential(new Credential(f, identities));
            } catch (Exception e) {
                if (errors != null)
                    errors.put(f.getName(), e);
            }
        }
    }

    /**
     * Import a directory, ignoring key pairs and errors.
     */
    public void readDirectory(File d) { readDirectory(d, null, null); }

    /**
     * Import a directory, collecting errors.
     */
    public void readDirectory(File d, Map<String, Exception> errors) {
        readDirectory(d, null, errors);
    }

    /**
     * Import a directory, collecting key pairs.
     */
    public void readDirectory(File d, Collection<KeyPair> keys) {
        readDirectory(d, keys, null);
    }

    /**
     * Write the contents of this Context as a zip file: each credential as
     * a DER attribute certificate and each relevant identity as a PEM
     * certificate, optionally including private keys.  If allIDs is true
     * every known identity is written; otherwise only the issuers of the
     * written credentials are.
     */
    public void writeZipFile(File f, boolean allIDs, boolean withPrivateKeys)
            throws IOException {
        ZipOutputStream z = new ZipOutputStream(new FileOutputStream(f));
        Set<Identity> ids = allIDs ? identities : new TreeSet<Identity>();
        int n = 0;

        for (Credential c : credentials()) {
            z.putNextEntry(new ZipEntry("attr" + n++ + ".der"));
            c.write(z);
            z.closeEntry();
            if (c.getID() != null && !allIDs)
                ids.add(c.getID());
        }
        for (Identity i : ids) {
            z.putNextEntry(new ZipEntry(i.getName() + ".pem"));
            i.write(z);
            if (withPrivateKeys)
                i.writePrivateKey(z);
            z.closeEntry();
        }
        z.close();
    }
}
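/*
 * Export sketch (the output name "export.zip" is hypothetical): write the
 * loaded credentials as attr0.der, attr1.der, ... plus a PEM certificate
 * for each issuing identity, without private keys:
 *
 *     ctxt.writeZipFile(new File("export.zip"), false, false);
 */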