View Javadoc

1   /*
2    * ModeShape (http://www.modeshape.org)
3    * See the COPYRIGHT.txt file distributed with this work for information
4    * regarding copyright ownership.  Some portions may be licensed
5    * to Red Hat, Inc. under one or more contributor license agreements.
6    * See the AUTHORS.txt file in the distribution for a full listing of 
7    * individual contributors. 
8    *
9    * ModeShape is free software. Unless otherwise indicated, all code in ModeShape
10   * is licensed to you under the terms of the GNU Lesser General Public License as
11   * published by the Free Software Foundation; either version 2.1 of
12   * the License, or (at your option) any later version.
13   *
14   * ModeShape is distributed in the hope that it will be useful,
15   * but WITHOUT ANY WARRANTY; without even the implied warranty of
16   * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17   * Lesser General Public License for more details.
18   *
19   * You should have received a copy of the GNU Lesser General Public
20   * License along with this software; if not, write to the Free
21   * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
22   * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
23   */
24  package org.modeshape.connector.store.jpa.model.simple;
25  
26  import java.io.ByteArrayInputStream;
27  import java.io.ByteArrayOutputStream;
28  import java.io.IOException;
29  import java.io.InputStream;
30  import java.io.ObjectInputStream;
31  import java.io.ObjectOutputStream;
32  import java.io.OutputStream;
33  import java.util.ArrayList;
34  import java.util.Arrays;
35  import java.util.Collection;
36  import java.util.Collections;
37  import java.util.HashMap;
38  import java.util.HashSet;
39  import java.util.Iterator;
40  import java.util.LinkedList;
41  import java.util.List;
42  import java.util.Map;
43  import java.util.Set;
44  import java.util.UUID;
45  import java.util.zip.GZIPInputStream;
46  import java.util.zip.GZIPOutputStream;
47  import javax.persistence.EntityManager;
48  import javax.persistence.EntityTransaction;
49  import javax.persistence.NoResultException;
50  import javax.persistence.Query;
51  import net.jcip.annotations.NotThreadSafe;
52  import org.modeshape.common.util.IoUtil;
53  import org.modeshape.common.util.StringUtil;
54  import org.modeshape.connector.store.jpa.JpaConnectorI18n;
55  import org.modeshape.connector.store.jpa.model.common.WorkspaceEntity;
56  import org.modeshape.connector.store.jpa.util.Namespaces;
57  import org.modeshape.connector.store.jpa.util.Serializer;
58  import org.modeshape.connector.store.jpa.util.Workspaces;
59  import org.modeshape.connector.store.jpa.util.Serializer.LargeValues;
60  import org.modeshape.graph.ModeShapeLexicon;
61  import org.modeshape.graph.ExecutionContext;
62  import org.modeshape.graph.Location;
63  import org.modeshape.graph.connector.LockFailedException;
64  import org.modeshape.graph.connector.map.AbstractMapWorkspace;
65  import org.modeshape.graph.connector.map.MapNode;
66  import org.modeshape.graph.connector.map.MapRepository;
67  import org.modeshape.graph.connector.map.MapRepositoryTransaction;
68  import org.modeshape.graph.connector.map.MapWorkspace;
69  import org.modeshape.graph.property.Binary;
70  import org.modeshape.graph.property.Name;
71  import org.modeshape.graph.property.NameFactory;
72  import org.modeshape.graph.property.Path;
73  import org.modeshape.graph.property.PathFactory;
74  import org.modeshape.graph.property.Property;
75  import org.modeshape.graph.property.PropertyFactory;
76  import org.modeshape.graph.property.PropertyType;
77  import org.modeshape.graph.property.Reference;
78  import org.modeshape.graph.property.UuidFactory;
79  import org.modeshape.graph.property.ValueFactories;
80  import org.modeshape.graph.property.ValueFactory;
81  import org.modeshape.graph.property.Path.Segment;
82  import org.modeshape.graph.request.CompositeRequest;
83  import org.modeshape.graph.request.LockBranchRequest.LockScope;
84  
85  /**
86   * Implementation of {@link MapRepository} for the {@link SimpleModel Simple JPA connector model}. This class exposes a map of
87   * workspace names to {@link Workspace workspaces} and each workspace provides a logical mapping of node UUIDs to {@link JpaNode
88   * nodes}. The {@code JpaNode} class functions as an adapter between the {@link NodeEntity persistent entity for nodes} and the
89   * {@link MapNode map repository interface for nodes}.
90   * <p>
91   * This class differs slightly from the other {@link MapRepository} implementations in that it exists only within the lifetime of
92   * a single {@link EntityManager} (which itself is opened and closed within the lifetime of a single {@link SimpleJpaConnection}).
93   * The other map repository implementations all outlive any particular connection and generally survive for the lifetime of the
94   * ModeShape server.
95   * </p>
96   */
97  public class SimpleJpaRepository extends MapRepository {
98  
    /** JPA entity manager through which all persistence operations run; lives only as long as this repository. */
    protected final EntityManager entityManager;
    /** Lookup/cache helper for workspace entities, backed by {@link #entityManager}. */
    protected final Workspaces workspaceEntities;
    /** Lookup/cache helper for namespace entities, backed by {@link #entityManager}. */
    protected final Namespaces namespaceEntities;
    /** Execution context supplying the value factories and property factory. */
    protected final ExecutionContext context;
    /** Factory for paths and path segments, obtained from the context's value factories. */
    protected final PathFactory pathFactory;
    /** Factory for names, obtained from the context's value factories. */
    protected final NameFactory nameFactory;
    /** Workspace names that are treated as existing even before they are persisted; created on demand in getWorkspace(). */
    private final List<String> predefinedWorkspaceNames;
    // NOTE(review): stored but not read within this file — presumably consulted by the serialization helpers; confirm.
    protected final boolean compressData;
    /** Whether clients may create new workspaces; exposed via {@link #creatingWorkspacesAllowed()}. */
    protected final boolean creatingWorkspacesAllowed;
    // NOTE(review): threshold for storing values as "large values"; not read directly in this file — confirm usage.
    protected final long minimumSizeOfLargeValuesInBytes;
109 
110     public SimpleJpaRepository( String sourceName,
111                                 UUID rootNodeUuid,
112                                 String defaultWorkspaceName,
113                                 String[] predefinedWorkspaceNames,
114                                 EntityManager entityManager,
115                                 ExecutionContext context,
116                                 boolean compressData,
117                                 boolean creatingWorkspacesAllowed,
118                                 long minimumSizeOfLargeValuesInBytes ) {
119         super(sourceName, rootNodeUuid, defaultWorkspaceName);
120 
121         this.context = context;
122         ValueFactories valueFactories = context.getValueFactories();
123         this.nameFactory = valueFactories.getNameFactory();
124         this.pathFactory = valueFactories.getPathFactory();
125         this.predefinedWorkspaceNames = Arrays.asList(predefinedWorkspaceNames);
126         this.compressData = compressData;
127         this.creatingWorkspacesAllowed = creatingWorkspacesAllowed;
128         this.minimumSizeOfLargeValuesInBytes = minimumSizeOfLargeValuesInBytes;
129 
130         this.entityManager = entityManager;
131         workspaceEntities = new Workspaces(entityManager);
132         namespaceEntities = new Namespaces(entityManager);
133         super.initialize();
134     }
135 
136     public SimpleJpaRepository( String sourceName,
137                                 UUID rootNodeUuid,
138                                 EntityManager entityManager,
139                                 ExecutionContext context,
140                                 boolean compressData,
141                                 boolean creatingWorkspacesAllowed,
142                                 long minimumSizeOfLargeValuesInBytes ) {
143         super(sourceName, rootNodeUuid);
144 
145         this.context = context;
146         ValueFactories valueFactories = context.getValueFactories();
147         this.nameFactory = valueFactories.getNameFactory();
148         this.pathFactory = valueFactories.getPathFactory();
149         this.predefinedWorkspaceNames = Collections.emptyList();
150         this.compressData = compressData;
151         this.creatingWorkspacesAllowed = creatingWorkspacesAllowed;
152         this.minimumSizeOfLargeValuesInBytes = minimumSizeOfLargeValuesInBytes;
153 
154         this.entityManager = entityManager;
155         workspaceEntities = new Workspaces(entityManager);
156         namespaceEntities = new Namespaces(entityManager);
157         super.initialize();
158     }
159 
160     /**
161      * Determine whether creating workspaces is allowed.
162      * 
163      * @return true if creating workspace is allowed, or false otherwise
164      * @see org.modeshape.connector.store.jpa.JpaSource#isCreatingWorkspacesAllowed()
165      */
166     final boolean creatingWorkspacesAllowed() {
167         return this.creatingWorkspacesAllowed;
168     }
169 
170     /*
171      * (non-Javadoc)
172      * @see org.modeshape.graph.connector.map.MapRepository#createWorkspace(org.modeshape.graph.ExecutionContext, java.lang.String)
173      */
174     @Override
175     protected MapWorkspace createWorkspace( ExecutionContext context,
176                                             String name ) {
177 
178         WorkspaceEntity entity = workspaceEntities.get(name, false);
179 
180         if (entity != null) {
181             return new Workspace(this, name, entity.getId().intValue());
182         }
183 
184         entity = workspaceEntities.create(name);
185 
186         // Flush to ensure that the entity ID is set
187         entityManager.flush();
188 
189         Workspace workspace = new Workspace(this, name, entity.getId().intValue());
190         workspace.createRootNode();
191 
192         return workspace;
193     }
194 
195     /*
196      * (non-Javadoc)
197      * @see org.modeshape.graph.connector.map.MapRepository#getWorkspace(java.lang.String)
198      */
199     @Override
200     public MapWorkspace getWorkspace( String name ) {
201         MapWorkspace workspace = super.getWorkspace(name);
202         if (workspace != null) return workspace;
203 
204         // There's no such workspace in the local cache, check if one exists in the DB
205         if (name == null) name = getDefaultWorkspaceName();
206         WorkspaceEntity entity = workspaceEntities.get(name, false);
207         if (entity == null) {
208             if (this.predefinedWorkspaceNames.contains(name)) {
209                 return createWorkspace(context, name);
210             }
211 
212             return null;
213         }
214 
215         return new Workspace(this, name, entity.getId());
216     }
217 
218     /*
219      * (non-Javadoc)
220      * @see org.modeshape.graph.connector.map.MapRepository#getWorkspaceNames()
221      */
222     @Override
223     public Set<String> getWorkspaceNames() {
224         Set<String> workspaceNames = new HashSet<String>(super.getWorkspaceNames());
225         workspaceNames.addAll(predefinedWorkspaceNames);
226 
227         return workspaceNames;
228     }
229 
230     /**
231      * {@inheritDoc}
232      * 
233      * @see org.modeshape.graph.connector.map.MapRepository#startTransaction(boolean)
234      */
235     @Override
236     public MapRepositoryTransaction startTransaction( boolean readonly ) {
237         EntityTransaction txn = entityManager.getTransaction();
238         return new SimpleJpaTransaction(txn);
239     }
240 
241     /**
242      * This class provides a logical mapping of UUIDs to {@link JpaNode nodes} within a named workspace.
243      * <p>
244      * Like its enclosing class, this class only survives for the lifetime of a single request (which may be a
245      * {@link CompositeRequest}).
246      * </p>
247      */
248     @SuppressWarnings( "synthetic-access" )
249     protected class Workspace extends AbstractMapWorkspace {
        /** Database identifier of this workspace's {@link WorkspaceEntity}. */
        private final long workspaceId;
        /** Request-scoped cache of nodes resolved by path; never invalidated, which is safe only because this workspace lives for a single request. */
        private final Map<Path, MapNode> nodesByPath = new HashMap<Path, MapNode>();

        /**
         * Creates a workspace adapter over an existing (or just-persisted) workspace entity.
         *
         * @param repository the owning repository; may not be null
         * @param name the workspace name; may not be null
         * @param workspaceId the database ID of the corresponding workspace entity
         */
        public Workspace( MapRepository repository,
                          String name,
                          long workspaceId ) {
            super(repository, name);

            this.workspaceId = workspaceId;

            // This gets called from the repository for this connector since the repository
            // already knows whether this workspace existed in the database before this call.
            // initialize();
        }

        /**
         * Runs the {@link #initialize()} step that the constructor deliberately skips, creating the root node.
         * Called only for brand-new workspaces (see {@link SimpleJpaRepository#createWorkspace}).
         */
        void createRootNode() {
            initialize();
        }
268 
269         /**
270          * This should copy the subgraph given by the original node and place the new copy under the supplied new parent. Note
271          * that internal references between nodes within the original subgraph must be reflected as internal nodes within the new
272          * subgraph.
273          * 
274          * @param context the context; may not be null
275          * @param original the node to be copied; may not be null
276          * @param newWorkspace the workspace containing the new parent node; may not be null
277          * @param newParent the parent where the copy is to be placed; may not be null
278          * @param desiredName the desired name for the node; if null, the name will be obtained from the original node
279          * @param recursive true if the copy should be recursive
280          * @return the new node, which is the top of the new subgraph
281          */
282         @Override
283         public MapNode copyNode( ExecutionContext context,
284                                  MapNode original,
285                                  MapWorkspace newWorkspace,
286                                  MapNode newParent,
287                                  Name desiredName,
288                                  boolean recursive ) {
289 
290             Map<UUID, UUID> oldToNewUuids = new HashMap<UUID, UUID>();
291             MapNode copyRoot = copyNode(context, original, newWorkspace, newParent, desiredName, true, oldToNewUuids);
292 
293             // Now, adjust any references in the new subgraph to objects in the original subgraph
294             // (because they were internal references, and need to be internal to the new subgraph)
295             PropertyFactory propertyFactory = context.getPropertyFactory();
296             UuidFactory uuidFactory = context.getValueFactories().getUuidFactory();
297             ValueFactory<Reference> referenceFactory = context.getValueFactories().getReferenceFactory();
298             boolean refChanged = false;
299             for (Map.Entry<UUID, UUID> oldToNew : oldToNewUuids.entrySet()) {
300                 MapNode oldNode = this.getNode(oldToNew.getKey());
301                 MapNode newNode = newWorkspace.getNode(oldToNew.getValue());
302                 assert oldNode != null;
303                 assert newNode != null;
304                 // Iterate over the properties of the new ...
305                 for (Map.Entry<Name, Property> entry : newNode.getProperties().entrySet()) {
306                     Property property = entry.getValue();
307                     // Now see if any of the property values are references ...
308                     List<Object> newValues = new ArrayList<Object>();
309                     boolean foundReference = false;
310                     for (Iterator<?> iter = property.getValues(); iter.hasNext();) {
311                         Object value = iter.next();
312                         PropertyType type = PropertyType.discoverType(value);
313                         if (type == PropertyType.REFERENCE) {
314                             UUID oldReferencedUuid = uuidFactory.create(value);
315                             UUID newReferencedUuid = oldToNewUuids.get(oldReferencedUuid);
316                             if (newReferencedUuid != null) {
317                                 newValues.add(referenceFactory.create(newReferencedUuid));
318                                 foundReference = true;
319                                 refChanged = true;
320                             }
321                         } else {
322                             newValues.add(value);
323                         }
324                     }
325                     // If we found at least one reference, we have to build a new Property object ...
326                     if (foundReference) {
327                         Property newProperty = propertyFactory.create(property.getName(), newValues);
328                         entry.setValue(newProperty);
329                     }
330                 }
331 
332                 if (refChanged) {
333                     ((JpaNode)newNode).serializeProperties();
334                 }
335             }
336             return copyRoot;
337         }
338 
339         /*
340          * (non-Javadoc)
341          * @see org.modeshape.graph.connector.map.AbstractMapWorkspace#correctSameNameSiblingIndexes(org.modeshape.graph.ExecutionContext, org.modeshape.graph.connector.map.MapNode, org.modeshape.graph.property.Name)
342          */
343         @Override
344         protected void correctSameNameSiblingIndexes( ExecutionContext context,
345                                                       MapNode parentNode,
346                                                       Name name ) {
347             int snsIndex = 1;
348             int parentIndex = 0;
349             List<MapNode> children = parentNode.getChildren();
350 
351             for (MapNode child : children) {
352                 NodeEntity childNode = ((JpaNode)child).entity;
353                 if (parentIndex != childNode.getIndexInParent()) {
354                     childNode.setIndexInParent(parentIndex);
355                 }
356 
357                 if (name.equals(child.getName().getName())) {
358                     if (snsIndex != childNode.getSameNameSiblingIndex()) {
359                         childNode.setSameNameSiblingIndex(snsIndex);
360                     }
361                     snsIndex++;
362 
363                 }
364                 parentIndex++;
365             }
366 
367         }
368 
369         /**
370          * Adds the given node to the persistent store, replacing any node already in the persistent store with the same UUID.
371          * <p>
372          * Invoking this method causes a database INSERT statement to execute immediately.
373          * </p>
374          * 
375          * @param node the node to add to the persistent store; may not be null
376          */
377         @Override
378         protected void addNodeToMap( MapNode node ) {
379             assert node != null;
380 
381             NodeEntity nodeEntity = ((JpaNode)node).entity;
382             nodeEntity.setWorkspaceId(this.workspaceId);
383             nodeEntity.setReferentialIntegrityEnforced(false);
384 
385             entityManager.persist(nodeEntity);
386         }
387 
        /**
         * Unsupported single-node removal: per the exception message, the superclass is expected never to invoke
         * this for this workspace implementation — branch removal is handled in bulk by
         * {@link #removeUuidReference(MapNode)} instead.
         *
         * @param nodeUuid the UUID of the node that would be removed
         * @return never returns normally
         * @throws IllegalStateException always
         */
        @Override
        protected MapNode removeNodeFromMap( UUID nodeUuid ) {
            throw new IllegalStateException("This code should be unreachable");
        }
392 
393         /**
394          * Removes the given node and its children from the persistent store using the
395          * {@link SubgraphQuery#deleteSubgraph(boolean) subgraph bulk delete method}.
396          * 
397          * @param node the root of the branch to be removed
398          */
399         @Override
400         protected void removeUuidReference( MapNode node ) {
401             SubgraphQuery branch = SubgraphQuery.create(entityManager, workspaceId, node.getUuid(), 0);
402             branch.deleteSubgraph(true);
403             branch.close();
404         }
405 
        /*
         * (non-Javadoc)
         * @see org.modeshape.graph.connector.map.AbstractMapWorkspace#createMapNode(java.util.UUID)
         */
        // Creates a JpaNode adapter around a brand-new (not yet persisted) NodeEntity with the given UUID.
        @Override
        protected MapNode createMapNode( UUID uuid ) {
            return new JpaNode(uuid);
        }
414 
415         /**
416          * Removes all of the nodes in this workspace from the persistent store with a single query.
417          */
418         @Override
419         protected void removeAllNodesFromMap() {
420             Query query = entityManager.createQuery("NodeEntity.deleteAllInWorkspace");
421             query.setParameter("workspaceId", workspaceId);
422             query.executeUpdate();
423         }
424 
425         /*
426          * (non-Javadoc)
427          * @see org.modeshape.graph.connector.map.AbstractMapWorkspace#getNode(java.util.UUID)
428          */
429         @Override
430         public JpaNode getNode( UUID nodeUuid ) {
431             assert nodeUuid != null;
432 
433             Query query = entityManager.createNamedQuery("NodeEntity.findByNodeUuid");
434             query.setParameter("workspaceId", workspaceId);
435             query.setParameter("nodeUuidString", nodeUuid.toString());
436             try {
437                 // Find the parent of the UUID ...
438                 NodeEntity result = (NodeEntity)query.getSingleResult();
439                 return new JpaNode(result);
440             } catch (NoResultException e) {
441                 return null;
442             }
443         }
444 
445         /*
446          * (non-Javadoc)
447          * @see org.modeshape.graph.connector.map.AbstractMapWorkspace#getNode(org.modeshape.graph.property.Path)
448          */
449         @Override
450         public MapNode getNode( Path path ) {
451             MapNode node = nodesByPath.get(path);
452             if (node != null) return node;
453 
454             node = super.getNode(path);
455             nodesByPath.put(path, node);
456             return node;
457         }
458 
459         /**
460          * Retrieves the branch of nodes rooted at the given location using the {@link SubgraphQuery#getNodes(boolean, boolean)
461          * subgraph bulk accessor method}.
462          * 
463          * @param rootLocation the root of the branch of nodes to retrieve
464          * @param maximumDepth the maximum depth to retrieve; a negative number indicates that the entire branch should be
465          *        retrieved
466          * @return the list of nodes in the branch rooted at {@code rootLocation}
467          */
468         public List<MapNode> getBranch( Location rootLocation,
469                                         int maximumDepth ) {
470             assert rootLocation.getUuid() != null || rootLocation.getPath() != null;
471             UUID subgraphRootUuid = rootLocation.getUuid();
472 
473             if (subgraphRootUuid == null) {
474                 MapNode rootNode = getNode(rootLocation.getPath());
475                 subgraphRootUuid = rootNode.getUuid();
476                 assert subgraphRootUuid != null;
477             }
478 
479             SubgraphQuery subgraph = SubgraphQuery.create(entityManager, workspaceId, subgraphRootUuid, maximumDepth);
480 
481             List<NodeEntity> entities = subgraph.getNodes(true, true);
482             List<MapNode> nodes = new ArrayList<MapNode>(entities.size());
483 
484             for (NodeEntity entity : entities) {
485                 nodes.add(new JpaNode(entity));
486             }
487 
488             subgraph.close();
489 
490             return nodes;
491         }
492 
        /**
         * This connector does not support connector-level, persistent locking of nodes; this is a no-op.
         *
         * @param node the node that would be locked; ignored
         * @param lockScope the scope of the requested lock; ignored
         * @param lockTimeoutInMillis the requested lock timeout; ignored
         * @throws LockFailedException never thrown by this implementation
         */
        public void lockNode( MapNode node,
                              LockScope lockScope,
                              long lockTimeoutInMillis ) throws LockFailedException {
            // Locking is not supported by this connector
        }

        /**
         * This connector does not support connector-level, persistent locking of nodes; this is a no-op.
         *
         * @param node the node to be unlocked; ignored
         */
        public void unlockNode( MapNode node ) {
            // Locking is not supported by this connector
        }
515 
516     }
517 
518     /**
519      * Adapter between the {@link NodeEntity persistent entity for nodes} and the {@link MapNode map repository interface for
520      * nodes}.
521      */
522     @SuppressWarnings( "synthetic-access" )
523     @NotThreadSafe
524     protected class JpaNode implements MapNode {
        /** The persistent entity this node adapts; never null. */
        private final NodeEntity entity;
        /** Properties keyed by name, deserialized lazily; null until {@link #ensurePropertiesLoaded()} runs. */
        private Map<Name, Property> properties = null;

        /**
         * Wraps an existing persistent entity.
         *
         * @param entity the entity to adapt; may not be null
         */
        protected JpaNode( NodeEntity entity ) {
            this.entity = entity;
        }

        /**
         * Creates an adapter around a brand-new (not yet persisted) entity identified by the given UUID.
         *
         * @param uuid the UUID for the new node; may not be null
         */
        public JpaNode( UUID uuid ) {
            this.entity = new NodeEntity();
            entity.setNodeUuidString(uuid.toString());
        }
536 
537         private final JpaNode jpaNodeFor( MapNode node ) {
538             if (!(node instanceof JpaNode)) {
539                 throw new IllegalStateException();
540             }
541             return (JpaNode)node;
542         }
543 
        /**
         * Inserts the given child at the given position in this node's child list.
         *
         * @param index the zero-based position at which to insert
         * @param child the child to add; must be a {@link JpaNode}
         */
        public void addChild( int index,
                              MapNode child ) {
            entity.addChild(index, jpaNodeFor(child).entity);
        }

        /**
         * Appends the given child to the end of this node's child list.
         *
         * @param child the child to add; must be a {@link JpaNode}
         */
        public void addChild( MapNode child ) {
            entity.addChild(jpaNodeFor(child).entity);
        }
552 
553         public List<MapNode> getChildren() {
554             List<MapNode> children = new ArrayList<MapNode>(entity.getChildren().size());
555 
556             for (NodeEntity child : entity.getChildren()) {
557                 children.add(new JpaNode(child));
558             }
559 
560             return Collections.unmodifiableList(children);
561         }
562 
563         public Segment getName() {
564             return pathFactory.createSegment(nameFactory.create(entity.getChildNamespace().getUri(), entity.getChildName()),
565                                              entity.getSameNameSiblingIndex());
566         }
567 
568         public MapNode getParent() {
569             if (entity.getParent() == null) return null;
570             return new JpaNode(entity.getParent());
571         }
572 
        /**
         * Lazily populates {@link #properties} by deserializing the entity's stored property data; subsequent
         * calls are no-ops. A synthetic {@link ModeShapeLexicon#UUID} property derived from the entity's UUID
         * is always included.
         *
         * @throws IllegalStateException if the serialized data cannot be read or a serialized class is missing
         */
        private void ensurePropertiesLoaded() {
            if (properties != null) return;

            Collection<Property> propsCollection = new LinkedList<Property>();

            // Only deserialize when the entity actually has stored data (new nodes have none)
            if (entity.getData() != null) {
                Serializer serializer = new Serializer(context, true);
                ObjectInputStream ois = null;

                try {
                    LargeValueSerializer largeValues = new LargeValueSerializer(entity);
                    ois = new ObjectInputStream(new ByteArrayInputStream(entity.getData()));
                    serializer.deserializeAllProperties(ois, propsCollection, largeValues);

                } catch (IOException ioe) {
                    throw new IllegalStateException(ioe);
                } catch (ClassNotFoundException cnfe) {
                    throw new IllegalStateException(cnfe);
                } finally {
                    try {
                        if (ois != null) ois.close();
                    } catch (Exception ex) {
                        // best-effort close; the stream wraps an in-memory buffer, so failure here is harmless
                    }
                }
            }

            PropertyFactory propertyFactory = context.getPropertyFactory();
            Map<Name, Property> properties = new HashMap<Name, Property>();
            // Every node exposes its UUID as a property, even though it is not part of the serialized data
            properties.put(ModeShapeLexicon.UUID, propertyFactory.create(ModeShapeLexicon.UUID, getUuid()));
            for (Property prop : propsCollection) {
                properties.put(prop.getName(), prop);
            }

            this.properties = properties;
        }
608 
        /**
         * Serializes the in-memory {@link #properties} map back into the entity's binary data column and updates
         * the entity's property count. Callers must have invoked {@link #ensurePropertiesLoaded()} first (all
         * call sites in this class do).
         *
         * @throws IllegalStateException if serialization fails
         */
        private void serializeProperties() {
            Serializer serializer = new Serializer(context, true);
            ObjectOutputStream oos = null;

            try {
                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                oos = new ObjectOutputStream(baos);

                LargeValueSerializer largeValues = new LargeValueSerializer(entity);
                // dna:uuid prop is in collection but won't be serialized
                int numberOfPropertiesToSerialize = properties.size() - 1;
                serializer.serializeProperties(oos,
                                               numberOfPropertiesToSerialize,
                                               properties.values(),
                                               largeValues,
                                               Serializer.NO_REFERENCES_VALUES);
                oos.flush();
                entity.setData(baos.toByteArray());
                entity.setPropertyCount(properties.size());
            } catch (IOException ioe) {
                throw new IllegalStateException(ioe);
            } finally {
                try {
                    if (oos != null) oos.close();
                } catch (Exception ignore) {
                    // closing an in-memory stream; nothing useful to do on failure
                }
            }
        }
637 
638         public MapNode removeProperty( Name propertyName ) {
639             ensurePropertiesLoaded();
640 
641             if (properties.containsKey(propertyName)) {
642                 properties.remove(propertyName);
643                 serializeProperties();
644             }
645             return this;
646         }
647 
        /**
         * Returns this node's properties (including the synthetic UUID property), loading them on first access.
         *
         * @return the live map of properties keyed by name; never null
         */
        public Map<Name, Property> getProperties() {
            ensurePropertiesLoaded();
            return properties;
        }

        /**
         * Returns the property with the given string name, resolved through the context's name factory.
         *
         * @param context the context whose name factory resolves {@code name}; may not be null
         * @param name the property name as a string
         * @return the property, or null if no such property exists
         */
        public Property getProperty( ExecutionContext context,
                                     String name ) {
            return getProperty(context.getValueFactories().getNameFactory().create(name));
        }

        /**
         * Returns the property with the given name.
         *
         * @param name the property name
         * @return the property, or null if no such property exists
         */
        public Property getProperty( Name name ) {
            ensurePropertiesLoaded();
            return properties.get(name);
        }
662 
663         public Set<Name> getUniqueChildNames() {
664             List<NodeEntity> children = entity.getChildren();
665             Set<Name> uniqueNames = new HashSet<Name>(children.size());
666 
667             for (NodeEntity child : children) {
668                 uniqueNames.add(nameFactory.create(child.getChildNamespace().getUri(), child.getChildName()));
669             }
670 
671             return uniqueNames;
672         }
673 
674         public UUID getUuid() {
675             if (entity.getNodeUuidString() == null) return null;
676             return UUID.fromString(entity.getNodeUuidString());
677         }
678 
        /**
         * Removes the given child from this node, matching by node UUID rather than entity identity.
         *
         * @param child the child to remove; must be a {@link JpaNode}
         * @return true if a child with the same UUID was found and removed, or false otherwise
         */
        public boolean removeChild( MapNode child ) {

            /*
             * The NodeEntity.equals method compares on the Hibernate identifier to avoid
             * confusing Hibernate.  However, different nodes can be loaded in the same 
             * session for the same UUID in the same workspace, forcing us to roll our own
             * implementation of indexOf that tests for the equality of the NodeEntity UUIDs, 
             * rather than their Hibernate identifiers.
             */
            List<NodeEntity> children = entity.getChildren();

            int index = -1;
            String childUuidString = jpaNodeFor(child).entity.getNodeUuidString();
            for (int i = 0; i < children.size(); i++) {
                if (childUuidString.equals(children.get(i).getNodeUuidString())) {
                    index = i;
                    break;
                }
            }

            // int index = entity.getChildren().indexOf(jpaNodeFor(child).entity);
            // assert entity.getChildren().contains(jpaNodeFor(child).entity);
            if (index < 0) return false;

            entity.removeChild(index);

            // NOTE(review): these assume NodeEntity.removeChild clears the removed child's parent — confirm
            assert !entity.getChildren().contains(child);
            assert child.getParent() == null;

            return true;
        }
710 
711         public void clearChildren() {
712             entity.getChildren().clear();
713         }
714 
715         public void setName( Segment name ) {
716             entity.setChildNamespace(namespaceEntities.get(name.getName().getNamespaceUri(), true));
717             // entity.setChildNamespace(NamespaceEntity.findByUri(entityManager, name.getName().getNamespaceUri(), true));
718             entity.setChildName(name.getName().getLocalName());
719             entity.setSameNameSiblingIndex(name.getIndex());
720         }
721 
722         public void setParent( MapNode parent ) {
723             if (parent == null) {
724                 entity.setParent(null);
725             } else {
726                 entity.setParent(jpaNodeFor(parent).entity);
727             }
728         }
729 
730         public MapNode setProperty( ExecutionContext context,
731                                     String name,
732                                     Object... values ) {
733             PropertyFactory propertyFactory = context.getPropertyFactory();
734 
735             return this.setProperty(propertyFactory.create(nameFactory.create(name), values));
736         }
737 
738         public MapNode setProperty( Property property ) {
739             ensurePropertiesLoaded();
740 
741             properties.put(property.getName(), property);
742             serializeProperties();
743 
744             return this;
745         }
746 
747         public MapNode setProperties( Iterable<Property> properties ) {
748             ensurePropertiesLoaded();
749 
750             for (Property property : properties) {
751                 this.properties.put(property.getName(), property);
752             }
753 
754             serializeProperties();
755 
756             return this;
757         }
758 
759         @Override
760         public String toString() {
761             if (entity.getNodeUuidString().equals(rootNodeUuid.toString())) return "<root>";
762             return getName().getString() + " (" + entity.getNodeUuidString() + ")";
763         }
764 
765         @Override
766         public boolean equals( Object obj ) {
767             if (!(obj instanceof JpaNode)) return false;
768 
769             JpaNode other = (JpaNode)obj;
770             return entity.getNodeUuidString().equals(other.entity.getNodeUuidString());
771         }
772 
773         @Override
774         public int hashCode() {
775             return entity.getNodeUuidString().hashCode();
776         }
777 
778     }
779 
780     protected class LargeValueSerializer implements LargeValues {
781         private final NodeEntity node;
782         private final Set<String> written;
783 
784         public LargeValueSerializer( NodeEntity entity ) {
785             this.node = entity;
786             this.written = null;
787         }
788 
789         public LargeValueSerializer( NodeEntity entity,
790                                      Set<String> written ) {
791             this.node = entity;
792             this.written = written;
793         }
794 
795         /**
796          * {@inheritDoc}
797          * 
798          * @see org.modeshape.connector.store.jpa.util.Serializer.LargeValues#getMinimumSize()
799          */
800         public long getMinimumSize() {
801             return minimumSizeOfLargeValuesInBytes;
802         }
803 
804         /**
805          * {@inheritDoc}
806          * 
807          * @see org.modeshape.connector.store.jpa.util.Serializer.LargeValues#read(org.modeshape.graph.property.ValueFactories,
808          *      byte[], long)
809          */
810         public Object read( ValueFactories valueFactories,
811                             byte[] hash,
812                             long length ) throws IOException {
813             String hashStr = StringUtil.getHexString(hash);
814             // Find the large value ...
815             LargeValueEntity entity = entityManager.find(LargeValueEntity.class, hashStr);
816             if (entity != null) {
817                 // Find the large value from the existing property entity ...
818                 byte[] data = entity.getData();
819                 if (entity.isCompressed()) {
820                     InputStream stream = new GZIPInputStream(new ByteArrayInputStream(data));
821                     try {
822                         data = IoUtil.readBytes(stream);
823                     } finally {
824                         stream.close();
825                     }
826                 }
827                 return valueFactories.getValueFactory(entity.getType()).create(data);
828             }
829             throw new IOException(JpaConnectorI18n.unableToReadLargeValue.text(getSourceName(), hashStr));
830         }
831 
832         /**
833          * {@inheritDoc}
834          * 
835          * @see org.modeshape.connector.store.jpa.util.Serializer.LargeValues#write(byte[], long,
836          *      org.modeshape.graph.property.PropertyType, java.lang.Object)
837          */
838         public void write( byte[] hash,
839                            long length,
840                            PropertyType type,
841                            Object value ) throws IOException {
842             if (value == null) return;
843             String hashStr = StringUtil.getHexString(hash);
844             if (written != null) written.add(hashStr);
845 
846             // Look for an existing value in the collection ...
847             for (LargeValueEntity existing : node.getLargeValues()) {
848                 if (existing.getHash().equals(hashStr)) {
849                     // Already associated with this properties entity
850                     return;
851                 }
852             }
853             LargeValueEntity entity = entityManager.find(LargeValueEntity.class, hashStr);
854             if (entity == null) {
855                 // We have to create the large value entity ...
856                 entity = new LargeValueEntity();
857                 entity.setCompressed(compressData);
858                 entity.setHash(hashStr);
859                 entity.setLength(length);
860                 entity.setType(type);
861                 ValueFactories factories = context.getValueFactories();
862                 byte[] bytes = null;
863                 switch (type) {
864                     case BINARY:
865                         Binary binary = factories.getBinaryFactory().create(value);
866                         InputStream stream = null;
867                         try {
868                             binary.acquire();
869                             stream = binary.getStream();
870                             if (compressData) stream = new GZIPInputStream(stream);
871                             bytes = IoUtil.readBytes(stream);
872                         } finally {
873                             try {
874                                 if (stream != null) stream.close();
875                             } finally {
876                                 binary.release();
877                             }
878                         }
879                         break;
880                     case URI:
881                         // This will be treated as a string ...
882                     default:
883                         String str = factories.getStringFactory().create(value);
884                         if (compressData) {
885                             ByteArrayOutputStream bs = new ByteArrayOutputStream();
886                             OutputStream strStream = new GZIPOutputStream(bs);
887                             try {
888                                 IoUtil.write(str, strStream);
889                             } finally {
890                                 strStream.close();
891                             }
892                             bytes = bs.toByteArray();
893                         } else {
894                             bytes = str.getBytes();
895                         }
896                         break;
897                 }
898                 entity.setData(bytes);
899                 entityManager.persist(entity);
900             }
901             // Now associate the large value with the properties entity ...
902             assert entity.getHash() != null;
903             node.getLargeValues().add(entity);
904         }
905 
906     }
907 
908 }