Fix bug with masking of outgoing messages.

Signed-off-by: Martin Sustrik <sustrik@250bpm.com>
diff --git a/src/protocols/reqrep/req.c b/src/protocols/reqrep/req.c
index 27be656..fbe1f45 100644
--- a/src/protocols/reqrep/req.c
+++ b/src/protocols/reqrep/req.c
@@ -625,7 +625,7 @@
     struct nn_pipe *to;
 
     /*  Send the request. */
-    nn_msg_cp (&msg, &self->task.request);
+    nn_msg_cp (&msg, &self->task.request, 0);
     rc = nn_xreq_send_to (&self->xreq.sockbase, &msg, &to);
 
     /*  If the request cannot be sent at the moment wait till
diff --git a/src/protocols/survey/surveyor.c b/src/protocols/survey/surveyor.c
index ed03303..b96d1e1 100644
--- a/src/protocols/survey/surveyor.c
+++ b/src/protocols/survey/surveyor.c
@@ -482,7 +482,7 @@
     int rc;
     struct nn_msg msg;
 
-    nn_msg_cp (&msg, &self->tosend);
+    nn_msg_cp (&msg, &self->tosend, 0);
     rc = nn_xsurveyor_send (&self->xsurveyor.sockbase, &msg);
     errnum_assert (rc == 0, -rc);
 }
diff --git a/src/transports/ws/sws.c b/src/transports/ws/sws.c
index a6c101c..c82f919 100644
--- a/src/transports/ws/sws.c
+++ b/src/transports/ws/sws.c
@@ -165,9 +165,25 @@
     nn_assert_state (sws, NN_SWS_STATE_ACTIVE);
     nn_assert (sws->outstate == NN_SWS_OUTSTATE_IDLE);
 
-    /*  Move the message to the local storage. */
     nn_msg_term (&sws->outmsg);
-    nn_msg_mv (&sws->outmsg, msg);
+
+    /*  Move the message to the local storage. */
+    if (sws->mode == NN_SWS_MODE_SERVER) {
+
+        nn_msg_mv (&sws->outmsg, msg);
+    }
+    else {
+
+        /*  On the client we have to do a hard copy of the message, as we
+            are going to mask it. Masking the data in-place would cause
+            other co-owners of the message to access garbled data. */
+        /*  TODO: This can be optimised for exclusively owned messages.
+            We may also want to mask the message in manageable chunks
+            (such as 4kB) so that there is no need to allocate twice the
+            amount of memory. */
+        nn_msg_cp (&sws->outmsg, msg, 1);
+        nn_msg_term (msg);
+    }
 
     /*  Serialise the message header. */
     sws->outhdr [0] = NN_SWS_FIN | NN_SWS_OPCODE_BINARY;
diff --git a/src/utils/msg.c b/src/utils/msg.c
index 3c45b32..7efa774 100644
--- a/src/utils/msg.c
+++ b/src/utils/msg.c
@@ -52,11 +52,29 @@
     nn_chunkref_mv (&dst->body, &src->body);
 }
 
-void nn_msg_cp (struct nn_msg *dst, struct nn_msg *src)
+void nn_msg_cp (struct nn_msg *dst, struct nn_msg *src, int hard)
 {
-    nn_chunkref_cp (&dst->sphdr, &src->sphdr);
-    nn_chunkref_cp (&dst->hdrs, &src->hdrs);
-    nn_chunkref_cp (&dst->body, &src->body);
+    size_t sz;
+
+    /*  In the case of a soft copy only the chunks' reference counts are
+        incremented. No actual copying happens. */
+    if (!hard) {
+        nn_chunkref_cp (&dst->sphdr, &src->sphdr);
+        nn_chunkref_cp (&dst->hdrs, &src->hdrs);
+        nn_chunkref_cp (&dst->body, &src->body);
+        return;
+    }
+
+    /*  Hard copy. Data are actually copied. */
+    sz = nn_chunkref_size (&src->sphdr);
+    nn_chunkref_init (&dst->sphdr, sz);
+    memcpy (nn_chunkref_data (&dst->sphdr), nn_chunkref_data (&src->sphdr), sz);
+    sz = nn_chunkref_size (&src->hdrs);
+    nn_chunkref_init (&dst->hdrs, sz);
+    memcpy (nn_chunkref_data (&dst->hdrs), nn_chunkref_data (&src->hdrs), sz);
+    sz = nn_chunkref_size (&src->body);
+    nn_chunkref_init (&dst->body, sz);
+    memcpy (nn_chunkref_data (&dst->body), nn_chunkref_data (&src->body), sz);
 }
 
 void nn_msg_bulkcopy_start (struct nn_msg *self, uint32_t copies)
diff --git a/src/utils/msg.h b/src/utils/msg.h
index bf9a44d..8d4cc26 100644
--- a/src/utils/msg.h
+++ b/src/utils/msg.h
@@ -58,7 +58,7 @@
 
 /*  Copies a message from src to dst. dst should not be
     initialised prior to the operation. */
-void nn_msg_cp (struct nn_msg *dst, struct nn_msg *src);
+void nn_msg_cp (struct nn_msg *dst, struct nn_msg *src, int hard);
 
 /*  Bulk copying is done by first invoking nn_msg_bulkcopy_start on the source
     message and specifying how many copies of the message will be made. Then,