#include <linux/device.h>
#include "rk3288_crypto.h"

#define RK_CRYPTO_DEC        BIT(0)

static void rk_crypto_complete(struct crypto_async_request *base, int err)
{
        if (base->complete)
                base->complete(base, err);
}

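/*
 * Reject requests whose length is not a multiple of the engine's
 * alignment (block) size, otherwise queue them on the device.
 */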
static int rk_handle_req(struct rk_crypto_info *dev,
                         struct skcipher_request *req)
{
        if (!IS_ALIGNED(req->cryptlen, dev->align_size))
                return -EINVAL;
        else
                return dev->enqueue(dev, &req->base);
}

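/*
 * Validate the AES key length and write the key straight into the
 * engine's key registers; only the length is kept in the tfm context.
 */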
static int rk_aes_setkey(struct crypto_skcipher *cipher,
                         const u8 *key, unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
        struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
            keylen != AES_KEYSIZE_256)
                return -EINVAL;
        ctx->keylen = keylen;
        memcpy_toio(ctx->dev->reg + RK_CRYPTO_AES_KEY_0, key, keylen);
        return 0;
}

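/*
 * The DES/3DES setkey handlers run the generic key checks and then
 * program the key into the TDES key registers.
 */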
static int rk_des_setkey(struct crypto_skcipher *cipher,
                         const u8 *key, unsigned int keylen)
{
        struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
        int err;

        err = verify_skcipher_des_key(cipher, key);
        if (err)
                return err;

        ctx->keylen = keylen;
        memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
        return 0;
}

static int rk_tdes_setkey(struct crypto_skcipher *cipher,
                          const u8 *key, unsigned int keylen)
{
        struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
        int err;

        err = verify_skcipher_des3_key(cipher, key);
        if (err)
                return err;

        ctx->keylen = keylen;
        memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
        return 0;
}

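/*
 * The per-algorithm encrypt/decrypt handlers below only record the
 * desired mode bits in the tfm context; the control registers are
 * programmed later in rk_ablk_hw_init() when the request is executed.
 */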
static int rk_aes_ecb_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_AES_ECB_MODE;
        return rk_handle_req(dev, req);
}

static int rk_aes_ecb_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_AES_ECB_MODE | RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_aes_cbc_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_AES_CBC_MODE;
        return rk_handle_req(dev, req);
}

static int rk_aes_cbc_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_AES_CBC_MODE | RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_des_ecb_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = 0;
        return rk_handle_req(dev, req);
}

static int rk_des_ecb_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_des_cbc_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC;
        return rk_handle_req(dev, req);
}

static int rk_des_cbc_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC | RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_des3_ede_ecb_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_TDES_SELECT;
        return rk_handle_req(dev, req);
}

static int rk_des3_ede_ecb_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_des3_ede_cbc_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC;
        return rk_handle_req(dev, req);
}

static int rk_des3_ede_cbc_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC |
                    RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

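/*
 * Program the engine for the current request: mode/control register,
 * IV, byte-swap configuration and DMA interrupt enables.
 */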
static void rk_ablk_hw_init(struct rk_crypto_info *dev)
{
        struct skcipher_request *req =
                skcipher_request_cast(dev->async_req);
        struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
        struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
        struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
        u32 ivsize, block, conf_reg = 0;

        block = crypto_tfm_alg_blocksize(tfm);
        ivsize = crypto_skcipher_ivsize(cipher);

        if (block == DES_BLOCK_SIZE) {
                ctx->mode |= RK_CRYPTO_TDES_FIFO_MODE |
                             RK_CRYPTO_TDES_BYTESWAP_KEY |
                             RK_CRYPTO_TDES_BYTESWAP_IV;
                CRYPTO_WRITE(dev, RK_CRYPTO_TDES_CTRL, ctx->mode);
                memcpy_toio(dev->reg + RK_CRYPTO_TDES_IV_0, req->iv, ivsize);
                conf_reg = RK_CRYPTO_DESSEL;
        } else {
                ctx->mode |= RK_CRYPTO_AES_FIFO_MODE |
                             RK_CRYPTO_AES_KEY_CHANGE |
                             RK_CRYPTO_AES_BYTESWAP_KEY |
                             RK_CRYPTO_AES_BYTESWAP_IV;
                if (ctx->keylen == AES_KEYSIZE_192)
                        ctx->mode |= RK_CRYPTO_AES_192BIT_key;
                else if (ctx->keylen == AES_KEYSIZE_256)
                        ctx->mode |= RK_CRYPTO_AES_256BIT_key;
                CRYPTO_WRITE(dev, RK_CRYPTO_AES_CTRL, ctx->mode);
                memcpy_toio(dev->reg + RK_CRYPTO_AES_IV_0, req->iv, ivsize);
        }
        conf_reg |= RK_CRYPTO_BYTESWAP_BTFIFO |
                    RK_CRYPTO_BYTESWAP_BRFIFO;
        CRYPTO_WRITE(dev, RK_CRYPTO_CONF, conf_reg);
        CRYPTO_WRITE(dev, RK_CRYPTO_INTENA,
                     RK_CRYPTO_BCDMA_ERR_ENA | RK_CRYPTO_BCDMA_DONE_ENA);
}

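/*
 * Point the block-cipher DMA engine at the mapped source and destination
 * buffers (length is in 32-bit words) and kick off processing.
 */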
static void crypto_dma_start(struct rk_crypto_info *dev)
{
        CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAS, dev->addr_in);
        CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAL, dev->count / 4);
        CRYPTO_WRITE(dev, RK_CRYPTO_BTDMAS, dev->addr_out);
        CRYPTO_WRITE(dev, RK_CRYPTO_CTRL, RK_CRYPTO_BLOCK_START |
                     _SBF(RK_CRYPTO_BLOCK_START, 16));
}

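/*
 * Prepare the next chunk of scatterlist data: save the IVs needed for
 * CBC decryption, map the buffers via load_data() and start the DMA.
 */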
static int rk_set_data_start(struct rk_crypto_info *dev)
{
        int err;
        struct skcipher_request *req =
                skcipher_request_cast(dev->async_req);
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
        u32 ivsize = crypto_skcipher_ivsize(tfm);
        u8 *src_last_blk = page_address(sg_page(dev->sg_src)) +
                dev->sg_src->offset + dev->sg_src->length - ivsize;

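        /*
         * For decryption, save the ciphertext blocks that act as chaining
         * IVs before in-place processing can overwrite them: the last block
         * of the current src entry feeds the next chunk (see rk_update_iv()),
         * and the last block of the whole request becomes the output IV in
         * req->iv.
         */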
        if (ctx->mode & RK_CRYPTO_DEC) {
                memcpy(ctx->iv, src_last_blk, ivsize);
                sg_pcopy_to_buffer(dev->first, dev->src_nents, req->iv,
                                   ivsize, dev->total - ivsize);
        }

        err = dev->load_data(dev, dev->sg_src, dev->sg_dst);
        if (!err)
                crypto_dma_start(dev);
        return err;
}

static int rk_ablk_start(struct rk_crypto_info *dev)
{
        struct skcipher_request *req =
                skcipher_request_cast(dev->async_req);
        unsigned long flags;
        int err = 0;

        dev->left_bytes = req->cryptlen;
        dev->total = req->cryptlen;
        dev->sg_src = req->src;
        dev->first = req->src;
        dev->src_nents = sg_nents(req->src);
        dev->sg_dst = req->dst;
        dev->dst_nents = sg_nents(req->dst);
        dev->aligned = 1;

        spin_lock_irqsave(&dev->lock, flags);
        rk_ablk_hw_init(dev);
        err = rk_set_data_start(dev);
        spin_unlock_irqrestore(&dev->lock, flags);
        return err;
}

static void rk_iv_copyback(struct rk_crypto_info *dev)
{
        struct skcipher_request *req =
                skcipher_request_cast(dev->async_req);
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
        u32 ivsize = crypto_skcipher_ivsize(tfm);

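        /* Update req->iv with the next chaining IV after an encryption. */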
        if (!(ctx->mode & RK_CRYPTO_DEC)) {
                if (dev->aligned) {
                        memcpy(req->iv, sg_virt(dev->sg_dst) +
                               dev->sg_dst->length - ivsize, ivsize);
                } else {
                        memcpy(req->iv, dev->addr_vir +
                               dev->count - ivsize, ivsize);
                }
        }
}

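/*
 * Reload the hardware IV register with the chaining IV for the next
 * chunk: the saved ciphertext block for decryption, or the last block
 * just produced in the destination scatterlist for encryption.
 */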
static void rk_update_iv(struct rk_crypto_info *dev)
{
        struct skcipher_request *req =
                skcipher_request_cast(dev->async_req);
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
        u32 ivsize = crypto_skcipher_ivsize(tfm);
        u8 *new_iv = NULL;

        if (ctx->mode & RK_CRYPTO_DEC) {
                new_iv = ctx->iv;
        } else {
                new_iv = page_address(sg_page(dev->sg_dst)) +
                         dev->sg_dst->offset + dev->sg_dst->length - ivsize;
        }

        if (ivsize == DES_BLOCK_SIZE)
                memcpy_toio(dev->reg + RK_CRYPTO_TDES_IV_0, new_iv, ivsize);
        else if (ivsize == AES_BLOCK_SIZE)
                memcpy_toio(dev->reg + RK_CRYPTO_AES_IV_0, new_iv, ivsize);
}

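/*
 * Per-chunk completion handler: unmap the finished chunk, copy data back
 * from the bounce buffer for unaligned requests, then either start the
 * next chunk or complete the request. Returns 0 on success or a negative
 * errno.
 */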
static int rk_ablk_rx(struct rk_crypto_info *dev)
{
        int err = 0;
        struct skcipher_request *req =
                skcipher_request_cast(dev->async_req);

        dev->unload_data(dev);
        if (!dev->aligned) {
                if (!sg_pcopy_from_buffer(req->dst, dev->dst_nents,
                                          dev->addr_vir, dev->count,
                                          dev->total - dev->left_bytes -
                                          dev->count)) {
                        err = -EINVAL;
                        goto out_rx;
                }
        }
        if (dev->left_bytes) {
                rk_update_iv(dev);
                if (dev->aligned) {
                        if (sg_is_last(dev->sg_src)) {
                                dev_err(dev->dev, "[%s:%d] Lack of data\n",
                                        __func__, __LINE__);
                                err = -ENOMEM;
                                goto out_rx;
                        }
                        dev->sg_src = sg_next(dev->sg_src);
                        dev->sg_dst = sg_next(dev->sg_dst);
                }
                err = rk_set_data_start(dev);
        } else {
                rk_iv_copyback(dev);
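                /* The whole request has been processed without error. */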
                dev->complete(dev->async_req, 0);
                tasklet_schedule(&dev->queue_task);
        }
out_rx:
        return err;
}

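/*
 * Bind the transform to the crypto device, install the request callbacks,
 * allocate a bounce page for unaligned data and enable the clocks.
 */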
static int rk_ablk_init_tfm(struct crypto_skcipher *tfm)
{
        struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
        struct rk_crypto_tmp *algt;

        algt = container_of(alg, struct rk_crypto_tmp, alg.skcipher);

        ctx->dev = algt->dev;
        ctx->dev->align_size = crypto_tfm_alg_alignmask(crypto_skcipher_tfm(tfm)) + 1;
        ctx->dev->start = rk_ablk_start;
        ctx->dev->update = rk_ablk_rx;
        ctx->dev->complete = rk_crypto_complete;
        ctx->dev->addr_vir = (char *)__get_free_page(GFP_KERNEL);

        return ctx->dev->addr_vir ? ctx->dev->enable_clk(ctx->dev) : -ENOMEM;
}

static void rk_ablk_exit_tfm(struct crypto_skcipher *tfm)
{
        struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);

        free_page((unsigned long)ctx->dev->addr_vir);
        ctx->dev->disable_clk(ctx->dev);
}

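/*
 * Algorithm templates exported to the core rk3288 crypto driver, which
 * registers them with the kernel crypto API.
 */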
struct rk_crypto_tmp rk_ecb_aes_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.skcipher = {
                .base.cra_name = "ecb(aes)",
                .base.cra_driver_name = "ecb-aes-rk",
                .base.cra_priority = 300,
                .base.cra_flags = CRYPTO_ALG_ASYNC,
                .base.cra_blocksize = AES_BLOCK_SIZE,
                .base.cra_ctxsize = sizeof(struct rk_cipher_ctx),
                .base.cra_alignmask = 0x0f,
                .base.cra_module = THIS_MODULE,

                .init = rk_ablk_init_tfm,
                .exit = rk_ablk_exit_tfm,
                .min_keysize = AES_MIN_KEY_SIZE,
                .max_keysize = AES_MAX_KEY_SIZE,
                .setkey = rk_aes_setkey,
                .encrypt = rk_aes_ecb_encrypt,
                .decrypt = rk_aes_ecb_decrypt,
        }
};

struct rk_crypto_tmp rk_cbc_aes_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.skcipher = {
                .base.cra_name = "cbc(aes)",
                .base.cra_driver_name = "cbc-aes-rk",
                .base.cra_priority = 300,
                .base.cra_flags = CRYPTO_ALG_ASYNC,
                .base.cra_blocksize = AES_BLOCK_SIZE,
                .base.cra_ctxsize = sizeof(struct rk_cipher_ctx),
                .base.cra_alignmask = 0x0f,
                .base.cra_module = THIS_MODULE,

                .init = rk_ablk_init_tfm,
                .exit = rk_ablk_exit_tfm,
                .min_keysize = AES_MIN_KEY_SIZE,
                .max_keysize = AES_MAX_KEY_SIZE,
                .ivsize = AES_BLOCK_SIZE,
                .setkey = rk_aes_setkey,
                .encrypt = rk_aes_cbc_encrypt,
                .decrypt = rk_aes_cbc_decrypt,
        }
};

struct rk_crypto_tmp rk_ecb_des_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.skcipher = {
                .base.cra_name = "ecb(des)",
                .base.cra_driver_name = "ecb-des-rk",
                .base.cra_priority = 300,
                .base.cra_flags = CRYPTO_ALG_ASYNC,
                .base.cra_blocksize = DES_BLOCK_SIZE,
                .base.cra_ctxsize = sizeof(struct rk_cipher_ctx),
                .base.cra_alignmask = 0x07,
                .base.cra_module = THIS_MODULE,

                .init = rk_ablk_init_tfm,
                .exit = rk_ablk_exit_tfm,
                .min_keysize = DES_KEY_SIZE,
                .max_keysize = DES_KEY_SIZE,
                .setkey = rk_des_setkey,
                .encrypt = rk_des_ecb_encrypt,
                .decrypt = rk_des_ecb_decrypt,
        }
};

struct rk_crypto_tmp rk_cbc_des_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.skcipher = {
                .base.cra_name = "cbc(des)",
                .base.cra_driver_name = "cbc-des-rk",
                .base.cra_priority = 300,
                .base.cra_flags = CRYPTO_ALG_ASYNC,
                .base.cra_blocksize = DES_BLOCK_SIZE,
                .base.cra_ctxsize = sizeof(struct rk_cipher_ctx),
                .base.cra_alignmask = 0x07,
                .base.cra_module = THIS_MODULE,

                .init = rk_ablk_init_tfm,
                .exit = rk_ablk_exit_tfm,
                .min_keysize = DES_KEY_SIZE,
                .max_keysize = DES_KEY_SIZE,
                .ivsize = DES_BLOCK_SIZE,
                .setkey = rk_des_setkey,
                .encrypt = rk_des_cbc_encrypt,
                .decrypt = rk_des_cbc_decrypt,
        }
};

struct rk_crypto_tmp rk_ecb_des3_ede_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.skcipher = {
                .base.cra_name = "ecb(des3_ede)",
                .base.cra_driver_name = "ecb-des3-ede-rk",
                .base.cra_priority = 300,
                .base.cra_flags = CRYPTO_ALG_ASYNC,
                .base.cra_blocksize = DES_BLOCK_SIZE,
                .base.cra_ctxsize = sizeof(struct rk_cipher_ctx),
                .base.cra_alignmask = 0x07,
                .base.cra_module = THIS_MODULE,

                .init = rk_ablk_init_tfm,
                .exit = rk_ablk_exit_tfm,
                .min_keysize = DES3_EDE_KEY_SIZE,
                .max_keysize = DES3_EDE_KEY_SIZE,
                .setkey = rk_tdes_setkey,
                .encrypt = rk_des3_ede_ecb_encrypt,
                .decrypt = rk_des3_ede_ecb_decrypt,
        }
};

struct rk_crypto_tmp rk_cbc_des3_ede_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.skcipher = {
                .base.cra_name = "cbc(des3_ede)",
                .base.cra_driver_name = "cbc-des3-ede-rk",
                .base.cra_priority = 300,
                .base.cra_flags = CRYPTO_ALG_ASYNC,
                .base.cra_blocksize = DES_BLOCK_SIZE,
                .base.cra_ctxsize = sizeof(struct rk_cipher_ctx),
                .base.cra_alignmask = 0x07,
                .base.cra_module = THIS_MODULE,

                .init = rk_ablk_init_tfm,
                .exit = rk_ablk_exit_tfm,
                .min_keysize = DES3_EDE_KEY_SIZE,
                .max_keysize = DES3_EDE_KEY_SIZE,
                .ivsize = DES_BLOCK_SIZE,
                .setkey = rk_tdes_setkey,
                .encrypt = rk_des3_ede_cbc_encrypt,
                .decrypt = rk_des3_ede_cbc_decrypt,
        }
};