#ifndef __CEPH_DECODE_H
#define __CEPH_DECODE_H
#include <asm/unaligned.h>
#include <linux/time.h>

#include "types.h"	/* ceph wire types: struct ceph_timespec, struct ceph_entity_addr */
/*
 * In all the helpers below:
 *   void **p     pointer to position pointer
 *   void *end    pointer to end of buffer (last byte + 1)
 */
static inline u64 ceph_decode_64(void **p)
{
	u64 v = get_unaligned_le64(*p);
	*p += sizeof(u64);
	return v;
}
static inline u32 ceph_decode_32(void **p)
{
	u32 v = get_unaligned_le32(*p);
	*p += sizeof(u32);
	return v;
}
static inline u16 ceph_decode_16(void **p)
{
	u16 v = get_unaligned_le16(*p);
	*p += sizeof(u16);
	return v;
}
static inline u8 ceph_decode_8(void **p)
{
	u8 v = *(u8 *)*p;
	(*p)++;
	return v;
}
static inline void ceph_decode_copy(void **p, void *pv, size_t n)
{
	memcpy(pv, *p, n);
	*p += n;
}
/*
 * bounds check input.
 */
#define ceph_decode_need(p, end, n, bad)			\
	do {							\
		if (unlikely(*(p) + (n) > (end)))		\
			goto bad;				\
	} while (0)
#define ceph_decode_64_safe(p, end, v, bad)			\
	do {							\
		ceph_decode_need(p, end, sizeof(u64), bad);	\
		v = ceph_decode_64(p);				\
	} while (0)
#define ceph_decode_32_safe(p, end, v, bad)			\
	do {							\
		ceph_decode_need(p, end, sizeof(u32), bad);	\
		v = ceph_decode_32(p);				\
	} while (0)
#define ceph_decode_16_safe(p, end, v, bad)			\
	do {							\
		ceph_decode_need(p, end, sizeof(u16), bad);	\
		v = ceph_decode_16(p);				\
	} while (0)
#define ceph_decode_8_safe(p, end, v, bad)			\
	do {							\
		ceph_decode_need(p, end, sizeof(u8), bad);	\
		v = ceph_decode_8(p);				\
	} while (0)

#define ceph_decode_copy_safe(p, end, pv, n, bad)		\
	do {							\
		ceph_decode_need(p, end, n, bad);		\
		ceph_decode_copy(p, pv, n);			\
	} while (0)
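/*
 * Example usage of the *_safe decode helpers (illustrative only; the
 * variable, function, and label names below are hypothetical).  A
 * caller walks a received buffer with a position pointer and jumps to
 * a local "bad" label on truncated input:
 *
 *	void *p = msg_payload;
 *	void *end = msg_payload + msg_len;
 *	u32 count, item;
 *	u64 ino;
 *
 *	ceph_decode_64_safe(&p, end, ino, bad);
 *	ceph_decode_32_safe(&p, end, count, bad);
 *	while (count--) {
 *		ceph_decode_32_safe(&p, end, item, bad);
 *		handle_item(item);
 *	}
 *	return 0;
 * bad:
 *	return -EINVAL;
 *
 * Once ceph_decode_need() has verified that n bytes remain, the plain
 * (unchecked) ceph_decode_* helpers may be used for those n bytes.
 */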
/*
 * struct ceph_timespec <-> struct timespec
 */
static inline void ceph_decode_timespec(struct timespec *ts,
					const struct ceph_timespec *tv)
{
	ts->tv_sec = le32_to_cpu(tv->tv_sec);
	ts->tv_nsec = le32_to_cpu(tv->tv_nsec);
}
static inline void ceph_encode_timespec(struct ceph_timespec *tv,
					const struct timespec *ts)
{
	tv->tv_sec = cpu_to_le32(ts->tv_sec);
	tv->tv_nsec = cpu_to_le32(ts->tv_nsec);
}
/*
 * sockaddr_storage <-> ceph_sockaddr
 */
static inline void ceph_encode_addr(struct ceph_entity_addr *a)
{
	__be16 ss_family = htons(a->in_addr.ss_family);
	a->in_addr.ss_family = *(__u16 *)&ss_family;
}
static inline void ceph_decode_addr(struct ceph_entity_addr *a)
{
	__be16 ss_family = *(__be16 *)&a->in_addr.ss_family;
	a->in_addr.ss_family = ntohs(ss_family);
	/* 512 is AF_INET (2) byte-swapped: the family was not in network order */
	WARN_ON(a->in_addr.ss_family == 512);
}
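/*
 * These helpers byte-swap only the ss_family field, between host order
 * (in memory) and network order (on the wire).  An illustrative
 * receive-side pattern (variable and label names are hypothetical):
 *
 *	struct ceph_entity_addr addr;
 *
 *	ceph_decode_copy_safe(&p, end, &addr, sizeof(addr), bad);
 *	ceph_decode_addr(&addr);
 *
 * The send side does the reverse: ceph_encode_addr(&addr), then copy
 * the struct into the outgoing buffer with ceph_encode_copy().
 */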
/*
 * encoders
 */
static inline void ceph_encode_64(void **p, u64 v)
{
	put_unaligned_le64(v, (__le64 *)*p);
	*p += sizeof(u64);
}
static inline void ceph_encode_32(void **p, u32 v)
{
	put_unaligned_le32(v, (__le32 *)*p);
	*p += sizeof(u32);
}
static inline void ceph_encode_16(void **p, u16 v)
{
	put_unaligned_le16(v, (__le16 *)*p);
	*p += sizeof(u16);
}
static inline void ceph_encode_8(void **p, u8 v)
{
	*(u8 *)*p = v;
	(*p)++;
}
static inline void ceph_encode_copy(void **p, const void *s, int len)
{
	memcpy(*p, s, len);
	*p += len;
}
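/*
 * The plain encoders above do no bounds checking; the caller must have
 * sized the destination buffer first.  Illustrative pattern (buffer
 * layout and names are hypothetical) for packing a small blob:
 *
 *	void *buf = kmalloc(4 + 8 + payload_len, GFP_NOFS);
 *	void *p = buf;
 *
 *	if (!buf)
 *		return -ENOMEM;
 *	ceph_encode_32(&p, payload_len);
 *	ceph_encode_64(&p, ino);
 *	ceph_encode_copy(&p, payload, payload_len);
 *
 * Each call writes little-endian data at *p and advances p past it.
 */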
/*
 * filepath, string encoders
 */
static inline void ceph_encode_filepath(void **p, void *end,
					u64 ino, const char *path)
{
	u32 len = path ? strlen(path) : 0;

	BUG_ON(*p + sizeof(ino) + sizeof(len) + len > end);
	ceph_encode_64(p, ino);
	ceph_encode_32(p, len);
	if (len)
		memcpy(*p, path, len);
	*p += len;
}

static inline void ceph_encode_string(void **p, void *end,
				      const char *s, u32 len)
{
	BUG_ON(*p + sizeof(len) + len > end);
	ceph_encode_32(p, len);
	if (len)
		memcpy(*p, s, len);
	*p += len;
}
/*
 * bounds check output.
 */
#define ceph_encode_need(p, end, n, bad)			\
	do {							\
		if (unlikely(*(p) + (n) > (end)))		\
			goto bad;				\
	} while (0)
#define ceph_encode_64_safe(p, end, v, bad)			\
	do {							\
		ceph_encode_need(p, end, sizeof(u64), bad);	\
		ceph_encode_64(p, v);				\
	} while (0)
#define ceph_encode_32_safe(p, end, v, bad)			\
	do {							\
		ceph_encode_need(p, end, sizeof(u32), bad);	\
		ceph_encode_32(p, v);				\
	} while (0)
#define ceph_encode_16_safe(p, end, v, bad)			\
	do {							\
		ceph_encode_need(p, end, sizeof(u16), bad);	\
		ceph_encode_16(p, v);				\
	} while (0)

#define ceph_encode_copy_safe(p, end, pv, n, bad)		\
	do {							\
		ceph_encode_need(p, end, n, bad);		\
		ceph_encode_copy(p, pv, n);			\
	} while (0)
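/*
 * Example usage of the *_safe encode helpers (illustrative only; the
 * variable and label names are hypothetical).  As on the decode side,
 * the caller supplies a "bad" label for the buffer-too-small case:
 *
 *	void *p = buf;
 *	void *end = buf + buflen;
 *
 *	ceph_encode_64_safe(&p, end, ino, bad);
 *	ceph_encode_32_safe(&p, end, flags, bad);
 *	ceph_encode_copy_safe(&p, end, payload, payload_len, bad);
 *	return p - buf;
 * bad:
 *	return -ERANGE;
 */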
/*
 * note: the caller's buffer must also have room for the u32 length
 * prefix; ceph_encode_string() BUGs if it does not.
 */
#define ceph_encode_string_safe(p, end, s, n, bad)		\
	do {							\
		ceph_encode_need(p, end, n, bad);		\
		ceph_encode_string(p, end, s, n);		\
	} while (0)

#endif